diff --git a/.travis.yml b/.travis.yml
index dbf2b13996862bc149df2ddf717a03b8400b45ec..e085b7f64ef0c723386f6fe7ffe897ecef47c02a 100644
--- a/.travis.yml
+++ b/.travis.yml
@@ -2,20 +2,26 @@ os:
   - linux
   - osx
 
-language: c++
+language: python
+python:
+    - "2.7"
 
 before_install:
-    - if [ "$TRAVIS_OS_NAME" = "linux" ]; then sudo apt-get update -qq; sudo apt-get install -y gfortran; fi
-    - if [ "$TRAVIS_OS_NAME" = "linux" ]; then pushd $HOME && mkdir cmake3.1 && cd cmake3.1 && (curl -L "http://cmake.org/files/v3.1/cmake-3.1.0-Linux-x86_64.tar.gz" | gunzip -c | tar x) && cd cmake-*/bin && export PATH="${PWD}:${PATH}"; popd; fi
-    - if [ "$TRAVIS_OS_NAME" = "osx" ]; then brew update ; brew outdated cmake || brew upgrade cmake ; fi
-    - cmake --version
+  - if [ "$TRAVIS_OS_NAME" = "linux" ]; then sudo apt-get update -qq; sudo apt-get install -y gfortran xvfb; fi
+  - if [ "$TRAVIS_OS_NAME" = "linux" ]; then wget https://repo.continuum.io/miniconda/Miniconda-latest-Linux-x86_64.sh -O miniconda.sh; fi
+  - if [ "$TRAVIS_OS_NAME" = "osx" ]; then brew update ; fi 
+  - if [ "$TRAVIS_OS_NAME" = "osx" ]; then wget https://repo.continuum.io/miniconda/Miniconda-latest-MacOSX-x86_64.sh -O miniconda.sh; fi
+  - if [ "$TRAVIS_OS_NAME" = "osx" ]; then wget https://github.com/UV-CDAT/uvcdat/releases/download/v2.4.1/gfortran-4.9.2-Mac.tar.gz -O ~/gfortran-4.9.2-Mac.tar.gz ; pushd / ; sudo tar xzvf ~/gfortran-4.9.2-Mac.tar.gz ; pushd ; fi
+  - export PATH="$HOME/miniconda/bin:$PATH"
+  - bash miniconda.sh -b -p $HOME/miniconda
+  - conda config --set always_yes yes --set changeps1 no
+  - conda update -y -q conda
+  - conda install openssl=1.0.2d
 
 script:
-    - git submodule init
-    - git submodule update
     - cd ..
     - mkdir _build
     - cd _build
-    - cmake -DGIT_PROTOCOL=git:// -DCDAT_BUILD_MODE=LEAN -DCDAT_BUILD_GRAPHICS=ON -DCDAT_BUILD_SCIPY=OFF ../uvcdat
-    - ctest -VV -S ../uvcdat/CMake/travis_build.cmake
-    - ctest -VV -S ../uvcdat/CMake/travis_submit.cmake
+    - cmake -DGIT_PROTOCOL=git:// ../uvcdat
+    - make
+    - ctest -j8 -D Experimental
diff --git a/CMake/ESMP.patch b/CMake/ESMP.patch
deleted file mode 100644
index 9d1eb2c07f51ff431ffed2be0dbcd0521d070a13..0000000000000000000000000000000000000000
--- a/CMake/ESMP.patch
+++ /dev/null
@@ -1,33 +0,0 @@
---- a/ESMP_LoadESMF.py	2014-01-14 10:00:22.000000000 -0500
-+++ b/ESMP_LoadESMF.py	2014-01-14 10:40:57.000000000 -0500
-@@ -64,6 +64,14 @@
- #      esmfmk = c[2]
- 
-   try:
-+
-+    # If we are not dealing with an absolute path treat it a relative to the
-+    # current Python module.
-+    if not os.path.isabs(esmfmk):
-+      # Get the directory for this module
-+      rel_dir = os.path.dirname(os.path.realpath(__file__))
-+      esmfmk = os.path.abspath(os.path.join(rel_dir, esmfmk))
-+
-     MKFILE = open(esmfmk, 'r')
-   except:
-     raise IOError("File not found\n  %s") % esmfmk
-@@ -72,11 +80,12 @@
-   libsdir = 0
-   esmfos = 0
-   esmfabi = 0
-+
-+  libsdir = os.path.dirname(esmfmk)
-+
- #  MKFILE = open(esmfmk,'r')
-   for line in MKFILE:
--    if 'ESMF_LIBSDIR' in line:
--      libsdir = line.split("=")[1]
--    elif 'ESMF_OS:' in line:
-+    if 'ESMF_OS:' in line:
-       esmfos = line.split(":")[1]
-     elif 'ESMF_ABI:' in line:
-       esmfabi = line.split(":")[1]
diff --git a/CMake/cdat_modules/basemap_deps.cmake b/CMake/cdat_modules/basemap_deps.cmake
deleted file mode 100644
index 98520d1d08c5aa1a59f9879c362e8d5251df1910..0000000000000000000000000000000000000000
--- a/CMake/cdat_modules/basemap_deps.cmake
+++ /dev/null
@@ -1 +0,0 @@
-set(basemap_deps ${matplotlib_pkg} ${geos_pkg})
diff --git a/CMake/cdat_modules/basemap_external.cmake b/CMake/cdat_modules/basemap_external.cmake
deleted file mode 100644
index 53b3a59a1b164d5f9db946703b52c107efa5ae64..0000000000000000000000000000000000000000
--- a/CMake/cdat_modules/basemap_external.cmake
+++ /dev/null
@@ -1,39 +0,0 @@
-# The basemap external project 
-
-set(basemap_binary "${CMAKE_CURRENT_BINARY_DIR}/build/basemap")
-
-#configure_file(
-#  ${cdat_CMAKE_SOURCE_DIR}/cdat_modules_extra/basemap_configure_step.cmake.in
-#  ${cdat_CMAKE_BINARY_DIR}/basemap_configure_step.cmake @ONLY)
-# to build we also run a cmake -P script.
-# the script will set LD_LIBRARY_PATH so that 
-# python can run after it is built on linux
-configure_file(
-  ${cdat_CMAKE_SOURCE_DIR}/cdat_modules_extra/basemap_make_step.cmake.in
-  ${cdat_CMAKE_BINARY_DIR}/basemap_make_step.cmake @ONLY)
-
-configure_file(
-  ${cdat_CMAKE_SOURCE_DIR}/cdat_modules_extra/basemap_install_step.cmake.in
-  ${cdat_CMAKE_BINARY_DIR}/basemap_install_step.cmake @ONLY)
-
-#set(basemap_CONFIGURE_COMMAND ${CMAKE_COMMAND}
-#    -DCONFIG_TYPE=${CMAKE_CFG_INTDIR} -P ${cdat_CMAKE_BINARY_DIR}/basemap_configure_step.cmake)
-set(basemap_BUILD_COMMAND ${CMAKE_COMMAND} -P ${cdat_CMAKE_BINARY_DIR}/basemap_make_step.cmake)
-set(basemap_INSTALL_COMMAND ${CMAKE_COMMAND} -P ${cdat_CMAKE_BINARY_DIR}/basemap_install_step.cmake)
-
-# create an external project to download numpy,
-# and configure and build it
-ExternalProject_Add(basemap
-  URL ${basemap_URL}/${basemap_GZ}
-  URL_MD5 ${basemap_MD5}
-  DOWNLOAD_DIR ${CDAT_PACKAGE_CACHE_DIR}
-  SOURCE_DIR ${basemap_binary}
-  BINARY_DIR ${basemap_binary}
-  CONFIGURE_COMMAND ""
-  BUILD_COMMAND ${basemap_BUILD_COMMAND}
-  UPDATE_COMMAND ""
-  INSTALL_COMMAND ${basemap_INSTALL_COMMAND}
-  DEPENDS
-    ${basemap_deps}
-  ${ep_log_options}
-)
diff --git a/CMake/cdat_modules/basemap_pkg.cmake b/CMake/cdat_modules/basemap_pkg.cmake
deleted file mode 100644
index bfcaa6c07b3d4af7a2f39141bbdd6870e50feaae..0000000000000000000000000000000000000000
--- a/CMake/cdat_modules/basemap_pkg.cmake
+++ /dev/null
@@ -1,10 +0,0 @@
-set( basemap_MAJOR 1  )
-set( basemap_MINOR 0  )
-set( basemap_PATCH 5  )
-set(basemap_URL ${LLNL_URL})
-set(basemap_GZ basemap-${basemap_MAJOR}.${basemap_MINOR}.${basemap_PATCH}.tar.gz)
-set(basemap_MD5 089260ea2b3eebb9d63e1783d0b15298 )
-set(BASEMAP_VERSION ${basemap_MAJOR}.${basemap_MINOR}.${basemap_PATCH})
-set(BASEMAP_SOURCE ${basemap_URL}/${basemap_GZ})
-
-add_cdat_package_dependent(basemap "" "" ON "CDAT_BUILD_GRAPHICS" OFF)
diff --git a/CMake/cdat_modules/cairo_deps.cmake b/CMake/cdat_modules/cairo_deps.cmake
deleted file mode 100644
index 78b7fe0b3d38ecba9f0a82f766a758b50f4fae50..0000000000000000000000000000000000000000
--- a/CMake/cdat_modules/cairo_deps.cmake
+++ /dev/null
@@ -1 +0,0 @@
-set(Cairo_deps ${pkgconfig_pkg} ${png_pkg} ${fontconfig_pkg} ${freetype_pkg} ${pixman_pkg} ${libxml2_pkg})
diff --git a/CMake/cdat_modules/cairo_external.cmake b/CMake/cdat_modules/cairo_external.cmake
deleted file mode 100644
index 1826425c1eb83ae14c2047965cc093855c632fa7..0000000000000000000000000000000000000000
--- a/CMake/cdat_modules/cairo_external.cmake
+++ /dev/null
@@ -1,18 +0,0 @@
-
-set(Cairo_source "${CMAKE_CURRENT_BINARY_DIR}/build/Cairo")
-set(Cairo_install "${cdat_EXTERNALS}")
-set(Cairo_conf_args --disable-static^^--enable-quartz=no^^--enable-win32=no^^--enable-skia=no^^--enable-os2=no^^--enable-beos=no^^--enable-drm=no^^--enable-gallium=no^^--enable-cogl=no)
-
-ExternalProject_Add(Cairo
-  LIST_SEPARATOR ^^
-  DOWNLOAD_DIR ${CDAT_PACKAGE_CACHE_DIR}
-  SOURCE_DIR ${Cairo_source}
-  INSTALL_DIR ${Cairo_install}
-  URL ${CAIRO_URL}/${CAIRO_GZ}
-  URL_MD5 ${CAIRO_MD5}
-  BUILD_IN_SOURCE 1
-  PATCH_COMMAND ""
-  CONFIGURE_COMMAND ${CMAKE_COMMAND} -DSKIP_LDFLAGS=YES -DINSTALL_DIR=<INSTALL_DIR> -DWORKING_DIR=<SOURCE_DIR> -DCONFIGURE_ARGS=${Cairo_conf_args} -P ${cdat_CMAKE_BINARY_DIR}/cdat_configure_step.cmake
-  DEPENDS ${Cairo_deps}
-  ${ep_log_options}
-)
diff --git a/CMake/cdat_modules/cairo_pkg.cmake b/CMake/cdat_modules/cairo_pkg.cmake
deleted file mode 100644
index be1bcce3ca372ea84a1c81a23236fdcdc29172f7..0000000000000000000000000000000000000000
--- a/CMake/cdat_modules/cairo_pkg.cmake
+++ /dev/null
@@ -1,14 +0,0 @@
-set(CAIRO_MAJOR 1)
-set(CAIRO_MINOR 10)
-set(CAIRO_PATCH 2)
-set(CAIRO_MAJOR_SRC 1)
-set(CAIRO_MINOR_SRC 12)
-set(CAIRO_PATCH_SRC 14)
-set(CAIRO_URL ${LLNL_URL})
-set(CAIRO_GZ cairo-${CAIRO_MAJOR_SRC}.${CAIRO_MINOR_SRC}.${CAIRO_PATCH_SRC}.tar.gz)
-set(CAIRO_MD5 4a55de6dbbd2d22eee9eea78e6bdbbfd )
-set(CAIRO_SOURCE ${CAIRO_URL}/${CAIRO_GZ})
-set(CAIRO_VERSION ${CAIRO_MAJOR_SRC}.${CAIRO_MINOR_SRC}.${CAIRO_PATCH_SRC})
-
-add_cdat_package_dependent(Cairo "" "" OFF "CDAT_BUILD_GRAPHICS" OFF)
-
diff --git a/CMake/cdat_modules/cd77_deps.cmake b/CMake/cdat_modules/cd77_deps.cmake
deleted file mode 100644
index e18cdbd1df97ed8ab0933ebf8ba0802d517ec3df..0000000000000000000000000000000000000000
--- a/CMake/cdat_modules/cd77_deps.cmake
+++ /dev/null
@@ -1 +0,0 @@
-set(cd77_deps ${python_pkg} ${ezget_pkg} ${libcdms_pkg} ${setuptools_pkg} )
diff --git a/CMake/cdat_modules/cd77_external.cmake b/CMake/cdat_modules/cd77_external.cmake
deleted file mode 100644
index 00e3b0833b9707c1ce0df6f74812e8bbeccf2f73..0000000000000000000000000000000000000000
--- a/CMake/cdat_modules/cd77_external.cmake
+++ /dev/null
@@ -1,16 +0,0 @@
-# create an external project to install cd77
-# and configure and build it
-
-ExternalProject_Add(cd77
-  DOWNLOAD_DIR ${CDAT_PACKAGE_CACHE_DIR}
-  SOURCE_DIR ${CMAKE_CURRENT_BINARY_DIR}/build/cd77
-  ${GIT_CMD_STR_cd77}
-  ${GIT_TAG_cd77}
-  BUILD_IN_SOURCE 1
-  CONFIGURE_COMMAND ""
-  BUILD_COMMAND ""
-  INSTALL_COMMAND env LD_LIBRARY_PATH=$ENV{LD_LIBRARY_PATH} CFLAGS=$ENV{CFLAGS} LDFLAGS=$ENV{LDFLAGS} PKG_CONFIG_PATH=$ENV{PKG_CONFIG_PATH} PYTHONPATH=${PYTHONPATH} ${USR_ENVS} ${PYTHON_EXECUTABLE} setup.py install ${USER_INSTALL_OPTIONS} ${PRFX}
-  DEPENDS ${${nm}_deps}
-  ${ep_log_options}
-)
-
diff --git a/CMake/cdat_modules/cd77_pkg.cmake b/CMake/cdat_modules/cd77_pkg.cmake
deleted file mode 100644
index 3dc195aec980bff5bb9b6262e7cd71c65d661794..0000000000000000000000000000000000000000
--- a/CMake/cdat_modules/cd77_pkg.cmake
+++ /dev/null
@@ -1,13 +0,0 @@
-set(cd77_VERSION 1.0.0)
-set(cd77_BRANCH master)
-set(cd77_REPOSITORY ${GIT_PROTOCOL}github.com/UV-CDAT/cd77.git )
-
-
-set(GIT_CMD_STR_cd77 GIT_REPOSITORY ${cd77_REPOSITORY})
-set(GIT_TAG_cd77 GIT_TAG "${cd77_BRANCH}")
-set (nm cd77)
-string(TOUPPER ${nm} uc_nm)
-
-if (CDAT_BUILD_PCMDI)
-  add_cdat_package(cd77 "" "" ON)
-endif()
diff --git a/CMake/cdat_modules/cdat_deps.cmake b/CMake/cdat_modules/cdat_deps.cmake
index 70d44f426f465d20c35c5631e9660b77d2c4139f..dcb9b307a6b23d0762847a509ebaa2606cea405c 100644
--- a/CMake/cdat_modules/cdat_deps.cmake
+++ b/CMake/cdat_modules/cdat_deps.cmake
@@ -1,16 +1 @@
-set(CDAT_deps ${wget_pkg} ${python_pkg} ${numpy_pkg}
-              ${libcdms_pkg}
-              ${libcf_pkg} ${netcdf_pkg} ${myproxyclient_pkg} ${udunits2_pkg})
-if (CDAT_BUILD_GRAPHICS)
-  if (CDAT_BUILD_PARAVIEW)
-    list(APPEND CDAT_deps ${paraview_pkg})
-  else()
-    list(APPEND CDAT_deps ${vtk_pkg})
-  endif()
-  list(APPEND CDAT_deps ${ffmpeg_pkg})
-endif()
-
-if (CDAT_BUILD_ESMF)
-    list(APPEND CDAT_deps ${esmf_pkg})
-endif()
-
+set(CDAT_deps)
diff --git a/CMake/cdat_modules/cdat_external.cmake b/CMake/cdat_modules/cdat_external.cmake
index 7b1b53f9bf4d9ba6762188cabd0a897d6933f3b9..2f79aa5070229418e5d95d720fee5687ae92e00d 100644
--- a/CMake/cdat_modules/cdat_external.cmake
+++ b/CMake/cdat_modules/cdat_external.cmake
@@ -1,18 +1,5 @@
 set(CDAT_source "${cdat_SOURCE_DIR}")
-
-set(RUNTIME_FLAGS ${cdat_EXTERNALS}/lib)
-set(LDFLAGS -L${cdat_EXTERNALS}/lib)
-
-if (CDAT_BUILD_LIBDRS)
- set(cdat_xtra_flags "${cdat_xtra_flags} --enable-drs")
-endif()
-
-set(cdat_build_dir ${CMAKE_CURRENT_BINARY_DIR}/cdat-build)
-
 set(WORKING_DIR "${cdat_CMAKE_BINARY_DIR}")
-configure_file(${cdat_CMAKE_SOURCE_DIR}/cdat_modules_extra/cdat_python_install_step.cmake.in
-  ${cdat_CMAKE_BINARY_DIR}/cdat_python_install_step.cmake
-  @ONLY)
 
 ExternalProject_Add(CDAT
   DOWNLOAD_DIR ""
@@ -22,7 +9,7 @@ ExternalProject_Add(CDAT
   PATCH_COMMAND ""
   CONFIGURE_COMMAND ""
   BUILD_COMMAND ""
-  INSTALL_COMMAND env "PYTHONPATH=$ENV{PYTHONPATH}" ${CMAKE_COMMAND} -DPYTHON_INSTALL_ARGS=${cdat_xtra_flags} -P ${cdat_CMAKE_BINARY_DIR}/cdat_python_install_step.cmake
+  INSTALL_COMMAND ${cdat_CMAKE_BINARY_DIR}/install_cdat_from_conda.bash
   DEPENDS ${CDAT_deps}
   ${ep_log_options}
 )
diff --git a/CMake/cdat_modules/cdat_pkg.cmake b/CMake/cdat_modules/cdat_pkg.cmake
index 05a66faf5a35235e28acb567035e3c93d2f49d4e..92aa4ed99433186f03596b8c005a4fc369242198 100644
--- a/CMake/cdat_modules/cdat_pkg.cmake
+++ b/CMake/cdat_modules/cdat_pkg.cmake
@@ -1,5 +1,5 @@
 set(cdat_VERSION_MAJOR 2)
-set(cdat_VERSION_MINOR 2)
+set(cdat_VERSION_MINOR 6)
 set(cdat_VERSION_PATCH 0)
 set(cdat_VERSION ${cdat_VERSION_MAJOR}.${cdat_VERSION_MINOR}.${cdat_VERSION_PATCH})
 
@@ -30,6 +30,5 @@ configure_file(${cdat_CMAKE_SOURCE_DIR}/cdat_modules_extra/version.in
   ${cdat_BINARY_DIR}/version
   @ONLY
 )
-
 add_cdat_package(CDAT "" "" ON)
 
diff --git a/CMake/cdat_modules/cdatlogger_deps.cmake b/CMake/cdat_modules/cdatlogger_deps.cmake
deleted file mode 100644
index c2cfeeb0b4a825499cb44f59486067ded1e0c76b..0000000000000000000000000000000000000000
--- a/CMake/cdat_modules/cdatlogger_deps.cmake
+++ /dev/null
@@ -1,2 +0,0 @@
-set (CDATLogger_deps ${cdat_pkg})
-
diff --git a/CMake/cdat_modules/cdatlogger_external.cmake b/CMake/cdat_modules/cdatlogger_external.cmake
deleted file mode 100644
index 69cb09c1aeddf3bf960daf475f094e346fbb970b..0000000000000000000000000000000000000000
--- a/CMake/cdat_modules/cdatlogger_external.cmake
+++ /dev/null
@@ -1,16 +0,0 @@
-
-configure_file(${cdat_CMAKE_SOURCE_DIR}/cmake_modules/CDATLogger.cmake.in
-  ${cdat_CMAKE_BINARY_DIR}/CDATLogger.cmake
-  @ONLY)
-
-ExternalProject_Add(CDATLogger
-  DOWNLOAD_DIR ""
-  SOURCE_DIR ${cdat_SOURCE_DIR}
-  BINARY_DIR ${cdat_build_dir}
-  BUILD_IN_SOURCE 0
-  PATCH_COMMAND ""
-  CONFIGURE_COMMAND ""
-  BUILD_COMMAND ""
-  INSTALL_COMMAND ${CMAKE_COMMAND} -P ${cdat_CMAKE_BINARY_DIR}/CDATLogger.cmake
-  DEPENDS ${CDATLogger_deps}
-)
diff --git a/CMake/cdat_modules/cdatlogger_pkg.cmake b/CMake/cdat_modules/cdatlogger_pkg.cmake
deleted file mode 100644
index f9e19bd05bc41b1ac0becb2da9c534f56de6e379..0000000000000000000000000000000000000000
--- a/CMake/cdat_modules/cdatlogger_pkg.cmake
+++ /dev/null
@@ -1,4 +0,0 @@
-set(CDATLOGGER_VERSION N/A)
-set(CDATLOGGER_SOURCE N/A)
-
-add_cdat_package(CDATLogger "" "" OFF)
diff --git a/CMake/cdat_modules/cffi_deps.cmake b/CMake/cdat_modules/cffi_deps.cmake
deleted file mode 100644
index 3e062051065e6a472664f9c95b7aebc505d020e9..0000000000000000000000000000000000000000
--- a/CMake/cdat_modules/cffi_deps.cmake
+++ /dev/null
@@ -1 +0,0 @@
-set(CFFI_deps ${pip_pkg} ${six_pkg} ${ffi_pkg} ${pycparser_pkg} )
diff --git a/CMake/cdat_modules/cffi_external.cmake b/CMake/cdat_modules/cffi_external.cmake
deleted file mode 100644
index 1fdb495c4c361bdda77db27aeec32718f04faf6b..0000000000000000000000000000000000000000
--- a/CMake/cdat_modules/cffi_external.cmake
+++ /dev/null
@@ -1,6 +0,0 @@
-
-# create an external project to install MyProxyClient,
-# and configure and build it
-set(nm CFFI)
-
-include(pipinstaller)
diff --git a/CMake/cdat_modules/cffi_pkg.cmake b/CMake/cdat_modules/cffi_pkg.cmake
deleted file mode 100644
index 889da6bb71a086e36a945fe6eec1504237b85f3f..0000000000000000000000000000000000000000
--- a/CMake/cdat_modules/cffi_pkg.cmake
+++ /dev/null
@@ -1,10 +0,0 @@
-set(CFFI_MAJOR_SRC 1)
-set(CFFI_MINOR_SRC 5)
-set(CFFI_PATCH_SRC 2)
-
-set(CFFI_VERSION ${CFFI_MAJOR_SRC}.${CFFI_MINOR_SRC}.${CFFI_PATCH_SRC})
-set(CFFI_GZ cffi-${CFFI_VERSION}.tar.gz)
-set(CFFI_SOURCE ${LLNL_URL}/${CFFI_GZ})
-set(CFFI_MD5 fa766133f7299464c8bf857e0c966a82)
-
-add_cdat_package_dependent(CFFI "" "" OFF "CDAT_BUILD_LEAN" ON)
diff --git a/CMake/cdat_modules/clapack_deps.cmake b/CMake/cdat_modules/clapack_deps.cmake
deleted file mode 100644
index e0f544de869ebe03c05ae19fe2294f7ea4d86f81..0000000000000000000000000000000000000000
--- a/CMake/cdat_modules/clapack_deps.cmake
+++ /dev/null
@@ -1 +0,0 @@
-set(CLAPACK_deps ${pkgconfig_pkg})
diff --git a/CMake/cdat_modules/clapack_external.cmake b/CMake/cdat_modules/clapack_external.cmake
deleted file mode 100644
index bab6e0f94724058a722a08fe3865907cd0bb5129..0000000000000000000000000000000000000000
--- a/CMake/cdat_modules/clapack_external.cmake
+++ /dev/null
@@ -1,39 +0,0 @@
-# The CLAPACK external project
-
-set(clapack_source "${CMAKE_CURRENT_BINARY_DIR}/LAPACK")
-set(clapack_binary "${CMAKE_CURRENT_BINARY_DIR}/LAPACK-build")
-set(clapack_install "${cdat_EXTERNALS}")
-set(NUMPY_LAPACK_binary ${clapack_binary})
-
-#
-# To fix compilation problem: relocation R_X86_64_32 against `a local symbol' can not be
-# used when making a shared object; recompile with -fPIC
-# See http://www.cmake.org/pipermail/cmake/2007-May/014350.html
-#
-if(UNIX AND CMAKE_SYSTEM_PROCESSOR STREQUAL "x86_64")
-  set(cdat_tpl_c_flags_LAPACK "-fPIC ${cdat_tpl_c_flags}")
-endif()
-
-configure_file(${cdat_CMAKE_SOURCE_DIR}/cdat_modules_extra/CLAPACK_install_step.cmake.in
-    ${cdat_CMAKE_BINARY_DIR}/CLAPACK_install_step.cmake
-    @ONLY)
-
-set(CLAPACK_INSTALL_COMMAND ${CMAKE_COMMAND} -P ${cdat_CMAKE_BINARY_DIR}/CLAPACK_install_step.cmake)
-
-ExternalProject_Add(CLAPACK
-  DOWNLOAD_DIR ${CDAT_PACKAGE_CACHE_DIR}
-  SOURCE_DIR ${clapack_source}
-  BINARY_DIR ${clapack_binary}
-  INSTALL_DIR ${clapack_install}
-  URL ${CLAPACK_URL}/${CLAPACK_GZ}
-  URL_MD5 ${CLAPACK_MD5}
-  CMAKE_CACHE_ARGS
-    -DCMAKE_CXX_FLAGS:STRING=${cdat_tpl_cxx_flags}
-    -DCMAKE_C_FLAGS:STRING=${cdat_tpl_c_flags}
-    -DBUILD_SHARED_LIBS:BOOL=ON
-    -DCMAKE_BUILD_TYPE:STRING=${CMAKE_BUILD_TYPE}
-    ${CLAPACK_EXTRA_ARGS}
-  INSTALL_COMMAND ${CLAPACK_INSTALL_COMMAND}
-  DEPENDS ${CLAPACK_deps}
-  ${ep_log_options}
-  )
diff --git a/CMake/cdat_modules/clapack_pkg.cmake b/CMake/cdat_modules/clapack_pkg.cmake
deleted file mode 100644
index 38f156a44a89e702716c6e5bc9f833ba8198375d..0000000000000000000000000000000000000000
--- a/CMake/cdat_modules/clapack_pkg.cmake
+++ /dev/null
@@ -1,15 +0,0 @@
-set(CLAPACK_MAJOR 3)
-set(CLAPACK_MINOR 2)
-set(CLAPACK_PATCH 1)
-set(CLAPACK_VERSION ${CLAPACK_MAJOR}.${CLAPACK_MINOR}.${CLAPACK_PATCH})
-set(CLAPACK_URL http://www.netlib.org/clapack)
-set(CLAPACK_GZ clapack-${CLAPACK_VERSION}-CMAKE.tgz)
-set(CLAPACK_MD5 4fd18eb33f3ff8c5d65a7d43913d661b)
-set(CLAPACK_SOURCE ${CLAPACK_URL}/${CLAPACK_GZ})
-
-if(NOT APPLE)
-  if(NOT CMAKE_Fortran_COMPILER)
-    add_cdat_package(CLAPACK "" "" OFF)
-  endif()
-endif()
-
diff --git a/CMake/cdat_modules/click_deps.cmake b/CMake/cdat_modules/click_deps.cmake
deleted file mode 100644
index ee4e50d5d9ef7a32121492e5cda9cafb2349b6c5..0000000000000000000000000000000000000000
--- a/CMake/cdat_modules/click_deps.cmake
+++ /dev/null
@@ -1 +0,0 @@
-set(CLICK_deps ${pip_pkg} ${six_pkg} ${ffi_pkg} ${pycparser_pkg} )
diff --git a/CMake/cdat_modules/click_external.cmake b/CMake/cdat_modules/click_external.cmake
deleted file mode 100644
index 0c89a3bf12bd7d276b92901267663342acf8e598..0000000000000000000000000000000000000000
--- a/CMake/cdat_modules/click_external.cmake
+++ /dev/null
@@ -1,6 +0,0 @@
-
-# create an external project to install MyProxyClient,
-# and configure and build it
-set(nm CLICK)
-
-include(pipinstaller)
diff --git a/CMake/cdat_modules/click_pkg.cmake b/CMake/cdat_modules/click_pkg.cmake
deleted file mode 100644
index b0aef777c6209d6e0f918697e357d8cfd2780c33..0000000000000000000000000000000000000000
--- a/CMake/cdat_modules/click_pkg.cmake
+++ /dev/null
@@ -1,13 +0,0 @@
-set(CLICK_MAJOR_SRC 4)
-set(CLICK_MINOR_SRC 1)
-
-set(CLICK_VERSION ${CLICK_MAJOR_SRC}.${CLICK_MINOR_SRC})
-set(CLICK_GZ click-${CLICK_VERSION}.tar.gz)
-set(CLICK_SOURCE ${LLNL_URL}/${CLICK_GZ})
-set(CLICK_MD5 6a3fa88c738f2f775ec6de126feb99a4)
-
-if (CDAT_BUILD_ALL)
-  add_cdat_package(CLICK "" "" ON)
-else()
-  add_cdat_package(CLICK "" "" OFF)
-endif()
diff --git a/CMake/cdat_modules/cligj_deps.cmake b/CMake/cdat_modules/cligj_deps.cmake
deleted file mode 100644
index ce62bdb2fc5c0acdf23dd772f1a6d028ed6baf4b..0000000000000000000000000000000000000000
--- a/CMake/cdat_modules/cligj_deps.cmake
+++ /dev/null
@@ -1 +0,0 @@
-set(CLIGJ_deps ${pip_pkg} ${six_pkg} ${ffi_pkg} ${pycparser_pkg} ${click_pkg} )
diff --git a/CMake/cdat_modules/cligj_external.cmake b/CMake/cdat_modules/cligj_external.cmake
deleted file mode 100644
index 805117570079aa101a6b29ade17e79f310003d31..0000000000000000000000000000000000000000
--- a/CMake/cdat_modules/cligj_external.cmake
+++ /dev/null
@@ -1,6 +0,0 @@
-
-# create an external project to install MyProxyClient,
-# and configure and build it
-set(nm CLIGJ)
-
-include(pipinstaller)
diff --git a/CMake/cdat_modules/cligj_pkg.cmake b/CMake/cdat_modules/cligj_pkg.cmake
deleted file mode 100644
index 06adad50605cc617b92ed1085eaa97dde8a17c49..0000000000000000000000000000000000000000
--- a/CMake/cdat_modules/cligj_pkg.cmake
+++ /dev/null
@@ -1,14 +0,0 @@
-set(CLIGJ_MAJOR_SRC 0)
-set(CLIGJ_MINOR_SRC 3)
-set(CLIGJ_PATCH_SRC 0)
-
-set(CLIGJ_VERSION ${CLIGJ_MAJOR_SRC}.${CLIGJ_MINOR_SRC}.${CLIGJ_PATCH_SRC})
-set(CLIGJ_GZ cligj-${CLIGJ_VERSION}.tar.gz)
-set(CLIGJ_SOURCE ${LLNL_URL}/${CLIGJ_GZ})
-set(CLIGJ_MD5 cd135f171b4ef2c07ebd34731ccf09a5)
-
-if (CDAT_BUILD_ALL)
-  add_cdat_package(CLIGJ "" "" ON)
-else()
-  add_cdat_package(CLIGJ "" "" OFF)
-endif()
diff --git a/CMake/cdat_modules/cmcurl_external.cmake b/CMake/cdat_modules/cmcurl_external.cmake
deleted file mode 100644
index 8a6033f35a9a29aa65e7bc3b694fc43708a37273..0000000000000000000000000000000000000000
--- a/CMake/cdat_modules/cmcurl_external.cmake
+++ /dev/null
@@ -1,25 +0,0 @@
-# The cmCurl external project for Titan
-
-set(curl_source "${CMAKE_CURRENT_SOURCE_DIR}/cmcurl")
-set(curl_binary "${CMAKE_CURRENT_BINARY_DIR}/cmcurl")
-
-ExternalProject_Add(cmcurl
-  DOWNLOAD_COMMAND ""
-  SOURCE_DIR "${curl_source}"
-  BINARY_DIR "${curl_binary}"
-  CMAKE_GENERATOR ${gen}
-  CMAKE_ARGS
-    -DCMAKE_BUILD_TYPE:STRING=${CMAKE_BUILD_TYPE}
-    -DBUILD_SHARED_LIBS:BOOL=ON
-    -DBUILD_TESTING:BOOL=OFF
-    -DBUILD_CURL_TESTS:BOOL=OFF
-    -DBUILD_CURL_EXE:BOOL=OFF
-    -DCURL_DISABLE_LDAP:BOOL=ON
-    -DCURL_DISABLE_LDAPS:BOOL=ON
-    ${titan_compiler_args}
-    ${titan_binary_args}
-    ${cmcurl_EXTRA_ARGS}
-    -DTRIGGER_REBUILD:STRING=0
-  INSTALL_COMMAND ""
-  DEPENDS ${cmcurl_deps}
-)
diff --git a/CMake/cdat_modules/cmor_deps.cmake b/CMake/cdat_modules/cmor_deps.cmake
deleted file mode 100644
index 719a3c0015f780042e238e22608c1b8256f4aa4c..0000000000000000000000000000000000000000
--- a/CMake/cdat_modules/cmor_deps.cmake
+++ /dev/null
@@ -1 +0,0 @@
-set(CMOR_deps ${pkgconfig_pkg} ${python_pkg} ${hdf5_pkg} ${netcdf_pkg} ${zlib_pkg} ${uuid_pkg} ${udunits2_pkg} ${cdat_pkg})
diff --git a/CMake/cdat_modules/cmor_external.cmake b/CMake/cdat_modules/cmor_external.cmake
deleted file mode 100644
index 5cf40532283954eddcf29c2b8d7ea76c00b1a107..0000000000000000000000000000000000000000
--- a/CMake/cdat_modules/cmor_external.cmake
+++ /dev/null
@@ -1,22 +0,0 @@
-set(cmor_source_dir  "${CMAKE_CURRENT_BINARY_DIR}/build/cmor")
-set(cmor_binary_dir  "${CMAKE_CURRENT_BINARY_DIR}/build/cmor")
-set(cmor_install_dir "${cdat_EXTERNALS}")
-
-set(cmor_configure_args --with-netcdf=${netcdf_install} --with-udunits2=${udunits_install} --with-uuid=${uuid_install} --enable-fortran=yes --with-python=${CMAKE_INSTALL_PREFIX} --prefix=${CMAKE_INSTALL_PREFIX})
-
-# it appears currently we only configure cmor but not build it.
-ExternalProject_Add(CMOR
-  DOWNLOAD_DIR ${CDAT_PACKAGE_CACHE_DIR}
-  SOURCE_DIR ${cmor_source_dir}
-  BUILD_IN_SOURCE 1
-  ${GIT_CMD_STR_CMOR}
-  ${GIT_TAG}
-  INSTALL_DIR ${cmor_install_dir}
-  PATCH_COMMAND ""
-  CONFIGURE_COMMAND sh ${cmor_source_dir}/configure ${cmor_configure_args}
-  DEPENDS ${CMOR_deps}
-  ${ep_log_options}
-)
-if (DEFINED GIT_CMD_STR_CMOR)
-  unset(GIT_CMD_STR_CMOR)
-endif()
diff --git a/CMake/cdat_modules/cmor_pkg.cmake b/CMake/cdat_modules/cmor_pkg.cmake
deleted file mode 100644
index e3b785ac39bcec4cd7009a1b589d1ca5a5b8897e..0000000000000000000000000000000000000000
--- a/CMake/cdat_modules/cmor_pkg.cmake
+++ /dev/null
@@ -1,14 +0,0 @@
-set(CMOR_VERSION 2.9.2)
-set(CMOR_BRANCH uvcdat-2.4.0)
-set(CMOR_REPOSITORY ${GIT_PROTOCOL}github.com/PCMDI/cmor.git )
-
-set(GIT_CMD_STR_CMOR GIT_REPOSITORY ${CMOR_REPOSITORY})
-set(GIT_TAG GIT_TAG "${CMOR_BRANCH}")
-set (nm CMOR)
-string(TOUPPER ${nm} uc_nm)
-
-if (CDAT_BUILD_ALL)
-  add_cdat_package(CMOR "" "" ON)
-else()
-  add_cdat_package_dependent(CMOR "" "" ON "CDAT_BUILD_CMOR" OFF)
-endif()
diff --git a/CMake/cdat_modules/configobj_deps.cmake b/CMake/cdat_modules/configobj_deps.cmake
deleted file mode 100644
index 1835fd3b45f3784379c44afee3b8ff22d2eae49a..0000000000000000000000000000000000000000
--- a/CMake/cdat_modules/configobj_deps.cmake
+++ /dev/null
@@ -1 +0,0 @@
-set(configobj_deps ${python_pkg} ${six_pkg} ${setuptools_pkg})
diff --git a/CMake/cdat_modules/configobj_external.cmake b/CMake/cdat_modules/configobj_external.cmake
deleted file mode 100644
index ce77c8f039f146171278a7317b78a0c5e17d7277..0000000000000000000000000000000000000000
--- a/CMake/cdat_modules/configobj_external.cmake
+++ /dev/null
@@ -1,24 +0,0 @@
-# configobj
-#
-set(configobj_source_dir "${CMAKE_CURRENT_BINARY_DIR}/build/configobj")
-
-configure_file(
-  "${cdat_CMAKE_SOURCE_DIR}/cdat_modules_extra/configobj_build_step.cmake.in"
-  "${cdat_CMAKE_BINARY_DIR}/configobj_build_step.cmake"
-  @ONLY
-)
-
-set(configobj_build_command ${CMAKE_COMMAND} -P ${cdat_CMAKE_BINARY_DIR}/configobj_build_step.cmake)
-
-ExternalProject_Add(configobj
-  DOWNLOAD_DIR ${CDAT_PACKAGE_CACHE_DIR}
-  SOURCE_DIR ${configobj_source_dir}
-  URL ${CONFIGOBJ_URL}/${CONFIGOBJ_GZ}
-  URL_MD5 ${CONFIGOBJ_MD5}
-  BUILD_IN_SOURCE 1
-  CONFIGURE_COMMAND ""
-  BUILD_COMMAND ${configobj_build_command}
-  INSTALL_COMMAND ""
-  DEPENDS ${configobj_deps}
-  ${ep_log_options}
-)
diff --git a/CMake/cdat_modules/configobj_pkg.cmake b/CMake/cdat_modules/configobj_pkg.cmake
deleted file mode 100644
index c236b86948c42f4f241e515fd28024726ead2d9a..0000000000000000000000000000000000000000
--- a/CMake/cdat_modules/configobj_pkg.cmake
+++ /dev/null
@@ -1,18 +0,0 @@
-set( CONFIGOBJ_MAJOR 5 )
-set( CONFIGOBJ_MINOR 0 )
-set( CONFIGOBJ_PATCH 6 )
-set( CONFIGOBJ_VERSION ${CONFIGOBJ_MAJOR}.${CONFIGOBJ_MINOR}.${CONFIGOBJ_PATCH} )
-set( CONFIGOBJ_URL ${LLNL_URL} )
-set( CONFIGOBJ_GZ configobj-${CONFIGOBJ_VERSION}.tar.gz )
-set( CONFIGOBJ_MD5 e472a3a1c2a67bb0ec9b5d54c13a47d6 )
-
-set (nm CONFIGOBJ)
-string(TOUPPER ${nm} uc_nm)
-set(${uc_nm}_VERSION ${${nm}_MAJOR}.${${nm}_MINOR}.${${nm}_PATCH})
-set(CONFIGOBJ_SOURCE ${CONFIGOBJ_URL}/${CONFIGOBJ_GZ})
-
-if (CDAT_BUILD_ALL)
-  add_cdat_package(configobj "" "" ON)
-else()
-  add_cdat_package(configobj "" "" OFF)
-endif()
diff --git a/CMake/cdat_modules/coverage_deps.cmake b/CMake/cdat_modules/coverage_deps.cmake
deleted file mode 100644
index d2744141ded9c695c90b378c5211289ae3d15d31..0000000000000000000000000000000000000000
--- a/CMake/cdat_modules/coverage_deps.cmake
+++ /dev/null
@@ -1 +0,0 @@
-set(coverage_deps ${python_pkg} ${pip_pkg})
diff --git a/CMake/cdat_modules/coverage_external.cmake b/CMake/cdat_modules/coverage_external.cmake
deleted file mode 100644
index e09cfb7fb5a15ad2cd125b7787dd0e41d43127ca..0000000000000000000000000000000000000000
--- a/CMake/cdat_modules/coverage_external.cmake
+++ /dev/null
@@ -1,4 +0,0 @@
-# External coverage.py package
-set(nm COVERAGE)
-
-include(pipinstaller)
diff --git a/CMake/cdat_modules/coverage_pkg.cmake b/CMake/cdat_modules/coverage_pkg.cmake
deleted file mode 100644
index 7e32eaa610c58f3f5233fcefec64497331ad7df4..0000000000000000000000000000000000000000
--- a/CMake/cdat_modules/coverage_pkg.cmake
+++ /dev/null
@@ -1,10 +0,0 @@
-set(COVERAGE_MAJOR_SRC 4)
-set(COVERAGE_MINOR_SRC 0)
-set(COVERAGE_PATCH_SRC 3)
-
-set(COVERAGE_VERSION ${COVERAGE_MAJOR_SRC}.${COVERAGE_MINOR_SRC}.${COVERAGE_PATCH_SRC})
-set(COVERAGE_GZ coverage-${COVERAGE_VERSION}.tar.gz)
-set(COVERAGE_SOURCE ${LLNL_URL}/${COVERAGE_GZ})
-set(COVERAGE_MD5 c7d3db1882484022c81bf619be7b6365)
-
-add_cdat_package_dependent(COVERAGE "" "" ON "CDAT_MEASURE_COVERAGE" OFF)
diff --git a/CMake/cdat_modules/cryptography_deps.cmake b/CMake/cdat_modules/cryptography_deps.cmake
deleted file mode 100644
index cad6e0ddd4e9897158f56615a1f6eca6d8cbf762..0000000000000000000000000000000000000000
--- a/CMake/cdat_modules/cryptography_deps.cmake
+++ /dev/null
@@ -1 +0,0 @@
-set(CRYPTOGRAPHY_deps ${cffi_pkg} ${enum34_pkg} ${idna_pkg} ${ipaddress_pkg} ${openssl_pkg} ${pip_pkg} ${pyasn1_pkg} ${python_pkg})
diff --git a/CMake/cdat_modules/cryptography_external.cmake b/CMake/cdat_modules/cryptography_external.cmake
deleted file mode 100644
index 9e10cb4bb74db92ef040125c12b041c810aab104..0000000000000000000000000000000000000000
--- a/CMake/cdat_modules/cryptography_external.cmake
+++ /dev/null
@@ -1,12 +0,0 @@
-
-# create an external project to install MyProxyClient,
-# and configure and build it
-set(nm CRYPTOGRAPHY)
-
-# Set LDFlags and CFlags to make it easier to find OpenSSL
-list(APPEND USR_ENVS
-  "LDFLAGS=-L${OPENSSL_LIBRARY_DIR} $ENV{LDFLAGS}"
-  "CFLAGS=-I${OPENSSL_INCLUDE_DIR} $ENV{CFLAGS}"
-  )
-
-include(pipinstaller)
diff --git a/CMake/cdat_modules/cryptography_pkg.cmake b/CMake/cdat_modules/cryptography_pkg.cmake
deleted file mode 100644
index cde5afa761c0205850abe4bcd0f0e2fc2dd33c51..0000000000000000000000000000000000000000
--- a/CMake/cdat_modules/cryptography_pkg.cmake
+++ /dev/null
@@ -1,10 +0,0 @@
-set(CRYPTOGRAPHY_MAJOR_SRC 1)
-set(CRYPTOGRAPHY_MINOR_SRC 2)
-set(CRYPTOGRAPHY_PATCH_SRC 2)
-
-set(CRYPTOGRAPHY_VERSION ${CRYPTOGRAPHY_MAJOR_SRC}.${CRYPTOGRAPHY_MINOR_SRC}.${CRYPTOGRAPHY_PATCH_SRC})
-set(CRYPTOGRAPHY_GZ cryptography-${CRYPTOGRAPHY_VERSION}.tar.gz)
-set(CRYPTOGRAPHY_SOURCE ${LLNL_URL}/${CRYPTOGRAPHY_GZ})
-set(CRYPTOGRAPHY_MD5 a8daf092d0558dac6700d7be93b555e5)
-
-add_cdat_package_dependent(CRYPTOGRAPHY "" "" OFF "CDAT_BUILD_LEAN" OFF)
diff --git a/CMake/cdat_modules/curl_deps.cmake b/CMake/cdat_modules/curl_deps.cmake
deleted file mode 100644
index 432b4d319c6c58612b413ae6a3e9e4c148709717..0000000000000000000000000000000000000000
--- a/CMake/cdat_modules/curl_deps.cmake
+++ /dev/null
@@ -1 +0,0 @@
-set(CURL_deps ${pkgconfig_pkg} ${libXML2_pkg} ${zlib_pkg})
diff --git a/CMake/cdat_modules/curl_external.cmake b/CMake/cdat_modules/curl_external.cmake
deleted file mode 100644
index 6dd77c37388d0b3238b8a3d7275034d33f6ced2a..0000000000000000000000000000000000000000
--- a/CMake/cdat_modules/curl_external.cmake
+++ /dev/null
@@ -1,17 +0,0 @@
-include(@cdat_CMAKE_BINARY_DIR@/cdat_common_environment.cmake)
-
-set(curl_source "${CMAKE_CURRENT_BINARY_DIR}/build/CURL")
-set(curl_install "${cdat_EXTERNALS}")
-
-ExternalProject_Add(CURL
-  DOWNLOAD_DIR ${CDAT_PACKAGE_CACHE_DIR}
-  SOURCE_DIR ${curl_source}
-  INSTALL_DIR ${curl_install}
-  URL ${CURL_URL}/${CURL_GZ}
-  URL_MD5 ${CURL_MD5}
-  BUILD_IN_SOURCE 1
-  PATCH_COMMAND ""
-  CONFIGURE_COMMAND ${CMAKE_COMMAND} -DINSTALL_DIR=<INSTALL_DIR> -DWORKING_DIR=<SOURCE_DIR> -P ${cdat_CMAKE_BINARY_DIR}/cdat_configure_step.cmake
-  DEPENDS ${CURL_deps}
-  ${ep_log_options}
-)
diff --git a/CMake/cdat_modules/curl_pkg.cmake b/CMake/cdat_modules/curl_pkg.cmake
deleted file mode 100644
index 6946ad58d3b2bbc7291346512477b53eee2bafad..0000000000000000000000000000000000000000
--- a/CMake/cdat_modules/curl_pkg.cmake
+++ /dev/null
@@ -1,13 +0,0 @@
-set(CURL_MAJOR 7)
-set(CURL_MINOR 22)
-set(CURL_PATCH 0)
-set(CURL_MAJOR_SRC 7)
-set(CURL_MINOR_SRC 33)
-set(CURL_PATCH_SRC 0)
-set(CURL_URL ${LLNL_URL})
-set(CURL_GZ curl-${CURL_MAJOR_SRC}.${CURL_MINOR_SRC}.${CURL_PATCH_SRC}.tar.gz)
-set(CURL_MD5 c8a4eaac7ce7b0d1bf458d62ccd4ef93 )
-set(CURL_VERSION ${CURL_MAJOR_SRC}.${CURL_MINOR_SRC}.${CURL_PATCH_SRC})
-set(CURL_SOURCE ${CURL_URL}/${CURL_GZ})
-
-add_cdat_package(CURL "" "" OFF)
diff --git a/CMake/cdat_modules/curses_deps.cmake b/CMake/cdat_modules/curses_deps.cmake
deleted file mode 100644
index 1926beb7c7ac25c3e360752b8f094419e954d8e4..0000000000000000000000000000000000000000
--- a/CMake/cdat_modules/curses_deps.cmake
+++ /dev/null
@@ -1 +0,0 @@
-set(curses_deps ${pkgconfig_pkg})
diff --git a/CMake/cdat_modules/curses_external.cmake b/CMake/cdat_modules/curses_external.cmake
deleted file mode 100644
index 2fe0ea547b261b52dd674ad4c50ca8cfd6537c2f..0000000000000000000000000000000000000000
--- a/CMake/cdat_modules/curses_external.cmake
+++ /dev/null
@@ -1,22 +0,0 @@
-set(curses_source "${CMAKE_CURRENT_BINARY_DIR}/build/curses")
-set(curses_install "${cdat_EXTERNALS}")
-set(curses_conf_args)
-
-configure_file(${cdat_CMAKE_SOURCE_DIR}/cdat_modules_extra/curses_patch_step.cmake.in
-  ${cdat_CMAKE_BINARY_DIR}/curses_patch_step.cmake
-  @ONLY)
-
-set(curses_PATCH_COMMAND ${CMAKE_COMMAND} -P ${cdat_CMAKE_BINARY_DIR}/curses_patch_step.cmake)
-
-ExternalProject_Add(Curses
-  DOWNLOAD_DIR ${CDAT_PACKAGE_CACHE_DIR}
-  SOURCE_DIR ${curses_source}
-  INSTALL_DIR ${curses_install}
-  URL ${CURSES_URL}/${CURSES_GZ}
-  URL_MD5 ${CURSES_MD5}
-  BUILD_IN_SOURCE 1
-  PATCH_COMMAND ${curses_PATCH_COMMAND}
-  CONFIGURE_COMMAND ${CMAKE_COMMAND} -DCONFIGURE_ARGS=${curses_conf_args} -DINSTALL_DIR=<INSTALL_DIR> -DWORKING_DIR=<SOURCE_DIR> -P ${cdat_CMAKE_BINARY_DIR}/cdat_configure_step.cmake
-  DEPENDS ${curses_deps}
-  ${ep_log_options}
-)
diff --git a/CMake/cdat_modules/curses_pkg.cmake b/CMake/cdat_modules/curses_pkg.cmake
deleted file mode 100644
index 29b6d52406983d8a413fcaf40b94122d15d31cdf..0000000000000000000000000000000000000000
--- a/CMake/cdat_modules/curses_pkg.cmake
+++ /dev/null
@@ -1,16 +0,0 @@
-set (package Curses)
-string(TOUPPER ${package} package_uc)
-
-set(${package_uc}_MAJOR_SRC 6)
-set(${package_uc}_MINOR_SRC 0)
-set(${package_uc}_PATCH_SRC 0)
-set(${package_uc}_URL ${LLNL_URL})
-#set(${package_uc}_GZ ncurses-${${package_uc}_MAJOR_SRC}.${${package_uc}_MINOR_SRC}.${${package_uc}_PATCH_SRC}.tar.gz)
-set(${package_uc}_GZ ncurses-${${package_uc}_MAJOR_SRC}.${${package_uc}_MINOR_SRC}.tar.gz)
-set(${pacakge_uc}_MD5 931959c0e1a9949999407b025cf44d3d)
-#set(${package_uc}_MD5 8cb9c412e5f2d96bc6f459aa8c6282a1)
-set(${package_uc}_SOURCE ${${package_uc}_URL}/${${package_uc}_GZ})
-set(${package_uc}_MD5 ${${package_uc}_MD5})
-
-set(${package_uc}_VERSION ${${package_uc}_MAJOR_SRC}.${${package_uc}_MINOR_SRC})
-add_cdat_package(${package} "" "" OFF)
diff --git a/CMake/cdat_modules/cycler_deps.cmake b/CMake/cdat_modules/cycler_deps.cmake
deleted file mode 100644
index 6c4db45355b8fbe025bee257cc257c700212e291..0000000000000000000000000000000000000000
--- a/CMake/cdat_modules/cycler_deps.cmake
+++ /dev/null
@@ -1 +0,0 @@
-set(cycler_deps ${python_pkg} ${setuptools_pkg} ${six_pkg})
diff --git a/CMake/cdat_modules/cycler_external.cmake b/CMake/cdat_modules/cycler_external.cmake
deleted file mode 100644
index 5cd06b6e89dc75a0f10e2593108f3e09025b7be0..0000000000000000000000000000000000000000
--- a/CMake/cdat_modules/cycler_external.cmake
+++ /dev/null
@@ -1,16 +0,0 @@
-# Cycler
-
-set(Cycler_source "${CMAKE_CURRENT_BINARY_DIR}/build/Cycler")
-
-ExternalProject_Add(Cycler
-  DOWNLOAD_DIR ${CDAT_PACKAGE_CACHE_DIR}
-  SOURCE_DIR ${Cycler_source}
-  URL ${CYCLER_URL}/${CYCLER_GZ}
-  URL_MD5 ${CYCLER_MD5}
-  BUILD_IN_SOURCE 1
-  CONFIGURE_COMMAND ""
-  BUILD_COMMAND ${PYTHON_EXECUTABLE} setup.py build
-  INSTALL_COMMAND ${PYTHON_EXECUTABLE} setup.py install --old-and-unmanageable ${PYTHON_EXTRA_PREFIX}
-  DEPENDS ${cycler_deps}
-  ${ep_log_options}
-)
diff --git a/CMake/cdat_modules/cycler_pkg.cmake b/CMake/cdat_modules/cycler_pkg.cmake
deleted file mode 100644
index b2310801a38301783ba407afcfcbf0c9b3f2664a..0000000000000000000000000000000000000000
--- a/CMake/cdat_modules/cycler_pkg.cmake
+++ /dev/null
@@ -1,10 +0,0 @@
-set(CYCLER_MAJOR 0)
-set(CYCLER_MINOR 9)
-set(CYCLER_PATCH 0)
-set(CYCLER_VERSION ${CYCLER_MAJOR}.${CYCLER_MINOR}.${CYCLER_PATCH})
-set(CYCLER_URL ${LLNL_URL})
-set(CYCLER_GZ cycler-${CYCLER_VERSION}.tar.gz)
-set(CYCLER_MD5 c10ade5ca3f0aadf575eb25203b225a5)
-set(CYCLER_SOURCE ${CYCLER_URL}/${CYCLER_GZ})
-
-add_cdat_package_dependent(Cycler "" "" ON "CDAT_BUILD_GRAPHICS" OFF)
diff --git a/CMake/cdat_modules/cython_deps.cmake b/CMake/cdat_modules/cython_deps.cmake
deleted file mode 100644
index eab0a78bef7206b5220a500cf0d1393696431593..0000000000000000000000000000000000000000
--- a/CMake/cdat_modules/cython_deps.cmake
+++ /dev/null
@@ -1 +0,0 @@
-set(Cython_deps ${pkgconfig_pkg} ${python_pkg} ${setuptools_pkg})
diff --git a/CMake/cdat_modules/cython_external.cmake b/CMake/cdat_modules/cython_external.cmake
deleted file mode 100644
index a059bdd30e07340c7ecf4ca27a9801a7ee958c3f..0000000000000000000000000000000000000000
--- a/CMake/cdat_modules/cython_external.cmake
+++ /dev/null
@@ -1,21 +0,0 @@
-# Cython
-#
-# --old-and-unmangeable solution avoids the use of eggs
-# and  forces to create a directory.
-# this seems to fix issues of the type encountered in 
-# bug #1192 and #1486
-
-set(Cython_source "${CMAKE_CURRENT_BINARY_DIR}/build/Cython")
-
-ExternalProject_Add(Cython
-  DOWNLOAD_DIR ${CDAT_PACKAGE_CACHE_DIR}
-  SOURCE_DIR ${Cython_source}
-  URL ${CYTHON_URL}/${CYTHON_GZ}
-  URL_MD5 ${CYTHON_MD5}
-  BUILD_IN_SOURCE 1
-  CONFIGURE_COMMAND ""
-  BUILD_COMMAND ${PYTHON_EXECUTABLE} setup.py build
-  INSTALL_COMMAND ${PYTHON_EXECUTABLE} setup.py install --old-and-unmanageable ${PYTHON_EXTRA_PREFIX}
-  DEPENDS ${Cython_deps}
-  ${ep_log_options}
-)
diff --git a/CMake/cdat_modules/cython_pkg.cmake b/CMake/cdat_modules/cython_pkg.cmake
deleted file mode 100644
index 3d1fe53d5af1adeeee504d3c66f822434b404e6d..0000000000000000000000000000000000000000
--- a/CMake/cdat_modules/cython_pkg.cmake
+++ /dev/null
@@ -1,10 +0,0 @@
-set(CYTHON_MAJOR 0)
-set(CYTHON_MINOR 23)
-set(CYTHON_PATCH 4)
-set(CYTHON_VERSION ${CYTHON_MAJOR}.${CYTHON_MINOR}.${CYTHON_PATCH})
-set(CYTHON_URL ${LLNL_URL} )
-set(CYTHON_GZ Cython-${CYTHON_VERSION}.tar.gz)
-set(CYTHON_MD5 157df1f69bcec6b56fd97e0f2e057f6e)
-set(CYTHON_SOURCE ${CYTHON_URL}/${CYTHON_GZ})
-
-add_cdat_package_dependent(Cython "" "" OFF "CDAT_BUILD_LEAN" OFF)
diff --git a/CMake/cdat_modules/data_deps.cmake b/CMake/cdat_modules/data_deps.cmake
deleted file mode 100644
index 8b137891791fe96927ad78e64b0aad7bded08bdc..0000000000000000000000000000000000000000
--- a/CMake/cdat_modules/data_deps.cmake
+++ /dev/null
@@ -1 +0,0 @@
-
diff --git a/CMake/cdat_modules/data_pkg.cmake b/CMake/cdat_modules/data_pkg.cmake
deleted file mode 100644
index d4be977a88dfd86fb23e2ba4313fd8bef8eb689f..0000000000000000000000000000000000000000
--- a/CMake/cdat_modules/data_pkg.cmake
+++ /dev/null
@@ -1,17 +0,0 @@
-# Do we download the data ?
-option(CDAT_DOWNLOAD_SAMPLE_DATA "Download sample data" ON)
-if (CDAT_BUILD_LEAN)
-    message("[INFO] Disabling download data for ESGF")
-    set(CDAT_DOWNLOAD_SAMPLE_DATA OFF)
-endif()
-
-if (OFFLINE_BUILD)
-    message("[INFO] Disabling download data for offline build")
-    set(CDAT_DOWNLOAD_SAMPLE_DATA OFF)
-endif()
-
-if (CDAT_DOWNLOAD_SAMPLE_DATA)
-  set(SAMPLE_DATA "")
-else()
-  set(SAMPLE_DATA --disable-sampledata)
-endif()
diff --git a/CMake/cdat_modules/dateutils_deps.cmake b/CMake/cdat_modules/dateutils_deps.cmake
deleted file mode 100644
index 08ee1bda77ff9247aeae439a5b04717817fa4226..0000000000000000000000000000000000000000
--- a/CMake/cdat_modules/dateutils_deps.cmake
+++ /dev/null
@@ -1 +0,0 @@
-set(DATEUTILS_deps ${python_pkg} ${pip_pkg} ${six_pkg})
diff --git a/CMake/cdat_modules/dateutils_external.cmake b/CMake/cdat_modules/dateutils_external.cmake
deleted file mode 100644
index e157432d1368921115bb70c5a66f9ccc7d005b98..0000000000000000000000000000000000000000
--- a/CMake/cdat_modules/dateutils_external.cmake
+++ /dev/null
@@ -1,12 +0,0 @@
-
-# create an external project to install MyProxyClient,
-# and configure and build it
-# --old-and-unmangeable solution avoids the use of eggs
-# and  forces to create a directory.
-# this seems to fix issues of the type encountered in 
-# bug #1192 and #1486
-
-set(nm DATEUTILS)
-set(USER_INSTALL_OPTIONS --old-and-unmanageable)
-include(pipinstaller)
-unset(USER_INSTALL_OPTIONS)
diff --git a/CMake/cdat_modules/dateutils_pkg.cmake b/CMake/cdat_modules/dateutils_pkg.cmake
deleted file mode 100644
index 9b1fe3fd57641c937a8e254404327fa513bdf2a1..0000000000000000000000000000000000000000
--- a/CMake/cdat_modules/dateutils_pkg.cmake
+++ /dev/null
@@ -1,11 +0,0 @@
-set(DATEUTILS_MAJOR_SRC 2)
-set(DATEUTILS_MINOR_SRC 2)
-set(DATEUTILS_PATCH_SRC -)
-
-set(DATEUTILS_VERSION ${DATEUTILS_MAJOR_SRC}.${DATEUTILS_MINOR_SRC}.${DATEUTILS_PATCH_SRC})
-set(DATEUTILS_VERSION ${DATEUTILS_MAJOR_SRC}.${DATEUTILS_MINOR_SRC})
-set(DATEUTILS_GZ python-dateutil-${DATEUTILS_VERSION}.tar.gz)
-set(DATEUTILS_SOURCE ${LLNL_URL}/${DATEUTILS_GZ})
-set(DATEUTILS_MD5 c1f654d0ff7e33999380a8ba9783fd5c)
-
-add_cdat_package_dependent(DATEUTILS "" "" OFF "NOT CDAT_BUILD_LEAN" OFF)
diff --git a/CMake/cdat_modules/distribute_deps.cmake b/CMake/cdat_modules/distribute_deps.cmake
deleted file mode 100644
index d6313c1c931c725584efaaacf756cc7c0003e846..0000000000000000000000000000000000000000
--- a/CMake/cdat_modules/distribute_deps.cmake
+++ /dev/null
@@ -1 +0,0 @@
-set(distribute_deps  ${setuptools_pkg})
diff --git a/CMake/cdat_modules/distribute_external.cmake b/CMake/cdat_modules/distribute_external.cmake
deleted file mode 100644
index c8f536fa01f46ec4288c0dc67f31405f1c98dac7..0000000000000000000000000000000000000000
--- a/CMake/cdat_modules/distribute_external.cmake
+++ /dev/null
@@ -1,21 +0,0 @@
-# create an external project to install MyProxyClient,
-# and configure and build it
-
-include(@cdat_CMAKE_BINARY_DIR@/cdat_common_environment.cmake)
-
-if (NOT OFFLINE_BUILD) 
-    set(EGG_GZ "distribute==${DISTRIBUTE_VERSION}")
-else ()
-    set(EGG_GZ ${CDAT_PACKAGE_CACHE_DIR}/${DISTRIBUTE_GZ})
-endif()
-
-ExternalProject_Add(distribute
-  DOWNLOAD_COMMAND ""
-  WORKING_DIRECTORY ${CMAKE_INSTALL_PREFIX}
-  BUILD_IN_SOURCE 1
-  CONFIGURE_COMMAND ""
-  BUILD_COMMAND ""
-  INSTALL_COMMAND ${EASY_INSTALL_BINARY} ${EGG_GZ}
-  DEPENDS ${distribute_deps}
-  ${ep_log_options}
-  )
diff --git a/CMake/cdat_modules/distribute_pkg.cmake b/CMake/cdat_modules/distribute_pkg.cmake
deleted file mode 100644
index 2a0415f6b10ca7e08b40f49feab833ea53581a29..0000000000000000000000000000000000000000
--- a/CMake/cdat_modules/distribute_pkg.cmake
+++ /dev/null
@@ -1,12 +0,0 @@
-set(DISTRIBUTE_MAJOR_SRC 0)
-set(DISTRIBUTE_MINOR_SRC 6)
-set(DISTRIBUTE_PATCH_SRC 45)
-
-set (nm DISTRIBUTE)
-string(TOUPPER ${nm} uc_nm)
-set(${uc_nm}_VERSION ${${nm}_MAJOR_SRC}.${${nm}_MINOR_SRC}.${${nm}_PATCH_SRC})
-set(DISTRIBUTE_GZ distribute-${DISTRIBUTE_VERSION}.tar.gz)
-set(DISTRIBUTE_SOURCE ${LLNL_URL}/${DISTRIBUTE_GZ})
-set(DISTRIBUTE_MD5 8953f2c07e6700dabf2ec150129b8c31 )
-
-add_cdat_package(distribute "" "" OFF)
diff --git a/CMake/cdat_modules/docutils_deps.cmake b/CMake/cdat_modules/docutils_deps.cmake
deleted file mode 100644
index ef9fc3c52c4c28d99c0b032c7dc489973558159a..0000000000000000000000000000000000000000
--- a/CMake/cdat_modules/docutils_deps.cmake
+++ /dev/null
@@ -1 +0,0 @@
-set(docutils_deps ${pip_pkg} ${jinja2_pkg})
diff --git a/CMake/cdat_modules/docutils_external.cmake b/CMake/cdat_modules/docutils_external.cmake
deleted file mode 100644
index 36bdaedb11d75994ce585524c992f89817857a5e..0000000000000000000000000000000000000000
--- a/CMake/cdat_modules/docutils_external.cmake
+++ /dev/null
@@ -1,6 +0,0 @@
-# create an external project to install docutils,
-# and configure and build it
-
-set(nm docutils)
-set(OLD "OFF")
-include(pipinstaller)
diff --git a/CMake/cdat_modules/docutils_pkg.cmake b/CMake/cdat_modules/docutils_pkg.cmake
deleted file mode 100644
index 1aaa2505d9e1c3a96b4497a4e5778063632217c1..0000000000000000000000000000000000000000
--- a/CMake/cdat_modules/docutils_pkg.cmake
+++ /dev/null
@@ -1,12 +0,0 @@
-set(docutils_MAJOR_SRC 0)
-set(docutils_MINOR_SRC 10)
-set(docutils_PATCH_SRC )
-
-set (nm docutils)
-string(TOUPPER ${nm} uc_nm)
-set(${uc_nm}_VERSION ${${nm}_MAJOR_SRC}.${${nm}_MINOR_SRC})
-set(DOCUTILS_GZ docutils-${DOCUTILS_VERSION}.tar.gz)
-set(DOCUTILS_SOURCE ${LLNL_URL}/${DOCUTILS_GZ})
-set(DOCUTILS_MD5 d8d4660c08302c791b2d71a155a2f4bc )
-
-add_cdat_package_dependent(docutils "" "" OFF "CDAT_BUILD_GUI" OFF)
diff --git a/CMake/cdat_modules/enum34_deps.cmake b/CMake/cdat_modules/enum34_deps.cmake
deleted file mode 100644
index ed6c021a4aa465a3e5d79775eedc89c449539fe3..0000000000000000000000000000000000000000
--- a/CMake/cdat_modules/enum34_deps.cmake
+++ /dev/null
@@ -1 +0,0 @@
-set(ENUM34_deps ${python_pkg})
diff --git a/CMake/cdat_modules/enum34_external.cmake b/CMake/cdat_modules/enum34_external.cmake
deleted file mode 100644
index 2edf14978eb0f50ba9a8b5a53f3f8ad16e8dc259..0000000000000000000000000000000000000000
--- a/CMake/cdat_modules/enum34_external.cmake
+++ /dev/null
@@ -1,8 +0,0 @@
-
-# create an external project to install MyProxyClient,
-# and configure and build it
-set(nm ENUM34)
-
-set(OLD OFF)
-include(pipinstaller)
-unset(OLD)
diff --git a/CMake/cdat_modules/enum34_pkg.cmake b/CMake/cdat_modules/enum34_pkg.cmake
deleted file mode 100644
index b4a57ec13dd7a4a727f0189a4504db2f567530eb..0000000000000000000000000000000000000000
--- a/CMake/cdat_modules/enum34_pkg.cmake
+++ /dev/null
@@ -1,10 +0,0 @@
-set(ENUM34_MAJOR_SRC 1)
-set(ENUM34_MINOR_SRC 1)
-set(ENUM34_PATCH_SRC 2)
-
-set(ENUM34_VERSION ${ENUM34_MAJOR_SRC}.${ENUM34_MINOR_SRC}.${ENUM34_PATCH_SRC})
-set(ENUM34_GZ enum34-${ENUM34_VERSION}.tar.gz)
-set(ENUM34_SOURCE ${LLNL_URL}/${ENUM34_GZ})
-set(ENUM34_MD5 025bb71b3f9d2fad15d0ee53e48dc873)
-
-add_cdat_package_dependent(ENUM34 "" "" OFF "CDAT_BUILD_LEAN" OFF)
diff --git a/CMake/cdat_modules/eof2_deps.cmake b/CMake/cdat_modules/eof2_deps.cmake
deleted file mode 100644
index fc79a9356a87683c6cd9896c5df340986dd8a459..0000000000000000000000000000000000000000
--- a/CMake/cdat_modules/eof2_deps.cmake
+++ /dev/null
@@ -1 +0,0 @@
-set(eof2_deps ${cdat_pkg})
diff --git a/CMake/cdat_modules/eof2_external.cmake b/CMake/cdat_modules/eof2_external.cmake
deleted file mode 100644
index d1d98ee83e62abe91e99e6dbf4e88dfa18b02f2c..0000000000000000000000000000000000000000
--- a/CMake/cdat_modules/eof2_external.cmake
+++ /dev/null
@@ -1,16 +0,0 @@
-# EOF2
-#
-set(eof2_source "${CMAKE_CURRENT_BINARY_DIR}/build/eof2")
-
-ExternalProject_Add(eof2
-  DOWNLOAD_DIR ${CDAT_PACKAGE_CACHE_DIR}
-  SOURCE_DIR ${eof2_source}
-  URL ${eof2_URL}/${eof2_GZ}
-  URL_MD5 ${eof2_MD5}
-  BUILD_IN_SOURCE 1
-  CONFIGURE_COMMAND ""
-  BUILD_COMMAND ${PYTHON_EXECUTABLE} setup.py build
-  INSTALL_COMMAND ${PYTHON_EXECUTABLE} setup.py install ${PYTHON_EXTRA_PREFIX}
-  DEPENDS ${eof2_deps}
-  ${ep_log_options}
-)
diff --git a/CMake/cdat_modules/eof2_pkg.cmake b/CMake/cdat_modules/eof2_pkg.cmake
deleted file mode 100644
index 89c37406791d98092bc60e8d6cb64e975a64d0d5..0000000000000000000000000000000000000000
--- a/CMake/cdat_modules/eof2_pkg.cmake
+++ /dev/null
@@ -1,9 +0,0 @@
-set(eof2_MAJOR )
-set(eof2_MINOR )
-set(eof2_VERSION 620a921b46b)
-set(eof2_URL ${LLNL_URL} )
-set(eof2_GZ eof2-${eof2_VERSION}.zip)
-set(eof2_MD5 39e21a8633f272dc8dc748adb4c7f0e8)
-set(eof2_SOURCE ${eof2_URL}/${eof2_GZ})
-
-add_cdat_package_dependent(eof2 "" "" OFF "CDAT_BUILD_LEAN" ON)
diff --git a/CMake/cdat_modules/eofs_deps.cmake b/CMake/cdat_modules/eofs_deps.cmake
deleted file mode 100644
index 2746e30df6c4be57c27213de5b4e65875d7183f4..0000000000000000000000000000000000000000
--- a/CMake/cdat_modules/eofs_deps.cmake
+++ /dev/null
@@ -1 +0,0 @@
-set(eofs_deps ${cdat_pkg})
diff --git a/CMake/cdat_modules/eofs_external.cmake b/CMake/cdat_modules/eofs_external.cmake
deleted file mode 100644
index 23c9f26fdcf13190fd4e085ca33eef662b6c653d..0000000000000000000000000000000000000000
--- a/CMake/cdat_modules/eofs_external.cmake
+++ /dev/null
@@ -1,16 +0,0 @@
-# Windfield`
-#
-set(eofs_source "${CMAKE_CURRENT_BINARY_DIR}/build/eofs")
-
-ExternalProject_Add(eofs
-  DOWNLOAD_DIR ${CDAT_PACKAGE_CACHE_DIR}
-  SOURCE_DIR ${eofs_source}
-  URL ${eofs_URL}/${eofs_GZ}
-  URL_MD5 ${eofs_MD5}
-  BUILD_IN_SOURCE 1
-  CONFIGURE_COMMAND ""
-  BUILD_COMMAND ${PYTHON_EXECUTABLE} setup.py build
-  INSTALL_COMMAND ${PYTHON_EXECUTABLE} setup.py install ${PYTHON_EXTRA_PREFIX}
-  DEPENDS ${eofs_deps}
-  ${ep_log_options}
-)
diff --git a/CMake/cdat_modules/eofs_pkg.cmake b/CMake/cdat_modules/eofs_pkg.cmake
deleted file mode 100644
index 7fbe79aeb4c7820ef157258dea182a96c030547d..0000000000000000000000000000000000000000
--- a/CMake/cdat_modules/eofs_pkg.cmake
+++ /dev/null
@@ -1,10 +0,0 @@
-set(eofs_MAJOR_SRC 1)
-set(eofs_MINOR_SRC 1)
-set(eofs_PATCH_SRC 0)
-set(eofs_VERSION ${eofs_MAJOR_SRC}.${eofs_MINOR_SRC}.${eofs_PATCH_SRC})
-set(eofs_URL ${LLNL_URL})
-set(eofs_GZ eofs-${eofs_VERSION}.tar.gz)
-set(eofs_MD5 52fce9f666d540069c90a6c109fcb3b4)
-set(eofs_SOURCE ${eofs_URL}/${eofs_GZ})
-
-add_cdat_package_dependent(eofs "" "" OFF "CDAT_BUILD_LEAN" ON)
diff --git a/CMake/cdat_modules/esmf_deps.cmake b/CMake/cdat_modules/esmf_deps.cmake
deleted file mode 100644
index 631f2a9e452716b898baeb7be58599b2a966b265..0000000000000000000000000000000000000000
--- a/CMake/cdat_modules/esmf_deps.cmake
+++ /dev/null
@@ -1,5 +0,0 @@
-set(ESMF_deps ${pkgconfig_pkg} ${python_pkg})
-
-if(CDAT_BUILD_ESMF_PARALLEL)
-  set(ESMF_deps ${mpi_pkg} ${ESMF_deps})
-endif()
diff --git a/CMake/cdat_modules/esmf_external.cmake b/CMake/cdat_modules/esmf_external.cmake
deleted file mode 100644
index eaf9518ade10f3c9f9f9096904af75a06f410978..0000000000000000000000000000000000000000
--- a/CMake/cdat_modules/esmf_external.cmake
+++ /dev/null
@@ -1,78 +0,0 @@
-set(ESMF_source_dir "${CMAKE_CURRENT_BINARY_DIR}/build/ESMF" CACHE INTERNAL "")
-set(ESMF_source "${CMAKE_CURRENT_BINARY_DIR}/build/ESMF" CACHE INTERNAL "")
-set(ESMF_install "${cdat_EXTERNALS}" CACHE INTERNAL "")
-set(ESMF_pthreads "OFF")
-set(ESMF_os "${CDAT_BUILD_ESMF_OS}")
-set(ESMF_compiler "${CDAT_BUILD_ESMF_COMPILER}")
-set(ESMF_abi "${CDAT_BUILD_ESMF_ABI}")
-set(ESMF_openmp "ON")
-
-if(APPLE)
-  if("${CMAKE_C_COMPILER_ID}" STREQUAL "Clang" AND ${CMAKE_C_COMPILER_VERSION} VERSION_GREATER 4.2)
-    # xcode 5 clang does not support openmp
-    set(ESMF_openmp "OFF")
-  endif()
-endif()
-
-# Check if ESMF should be built in parallel
-set(emsf_enable_mpi FALSE)
-if(CDAT_BUILD_ESMF_PARALLEL)
-  set(emsf_enable_mpi TRUE)
-endif()
-
-if("${emsf_enable_mpi}")
-  set(ESMF_comm "${CDAT_BUILD_ESMF_COMM}")
-else()
-  message("[INFO] CDAT will build ESMF serial")
-  set(ESMF_comm "mpiuni")
-endif()
-
-configure_file(
-  ${cdat_CMAKE_SOURCE_DIR}/cdat_modules_extra/ESMF_make_step.cmake.in
-  ${cdat_CMAKE_BINARY_DIR}/ESMF_make_step.cmake
-  @ONLY
-)
-
-configure_file(
-  ${cdat_CMAKE_SOURCE_DIR}/cdat_modules_extra/ESMF_install_step.cmake.in
-  ${cdat_CMAKE_BINARY_DIR}/ESMF_install_step.cmake
-  @ONLY
-)
-
-set(ESMF_build_command ${CMAKE_COMMAND} -P ${cdat_CMAKE_BINARY_DIR}/ESMF_make_step.cmake)
-set(ESMF_install_command ${CMAKE_COMMAND} -P ${cdat_CMAKE_BINARY_DIR}/ESMF_install_step.cmake)
-
-# ESMF Python interface. Install after ESMF is done.
-set(ESMP_source "${ESMF_source_dir}/ESMP")
-
-configure_file(
-  ${cdat_CMAKE_SOURCE_DIR}/cdat_modules_extra/ESMP_install_step.cmake.in
-  ${cdat_CMAKE_BINARY_DIR}/ESMP_install_step.cmake
-  @ONLY
-)
-
-configure_file(
-  ${cdat_CMAKE_SOURCE_DIR}/cdat_modules_extra/ESMP_patch_step.cmake.in
-  ${cdat_CMAKE_BINARY_DIR}/ESMP_patch_step.cmake
-  @ONLY
-)
-
-set(ESMP_install_command ${CMAKE_COMMAND} -P ${cdat_CMAKE_BINARY_DIR}/ESMP_install_step.cmake)
-set(ESMP_patch_command ${CMAKE_COMMAND} -P ${cdat_CMAKE_BINARY_DIR}/ESMP_patch_step.cmake)
-
-ExternalProject_Add(ESMF
-  DOWNLOAD_DIR ${CDAT_PACKAGE_CACHE_DIR}
-  SOURCE_DIR ${ESMF_source_dir}
-  INSTALL_DIR ${ESMF_install}
-  URL ${ESMF_URL}/${ESMF_GZ}
-  URL_MD5 ${ESMF_MD5}
-  BUILD_IN_SOURCE 1
-  PATCH_COMMAND ""
-  CONFIGURE_COMMAND ""
-  BUILD_COMMAND ${ESMF_build_command}
-  INSTALL_COMMAND ${ESMF_install_command}
-  INSTALL_COMMAND ${ESMP_install_command}
-  PATCH_COMMAND ${ESMP_patch_command}
-  DEPENDS ${ESMF_deps}
-  ${ep_log_options}
-)
diff --git a/CMake/cdat_modules/esmf_pkg.cmake b/CMake/cdat_modules/esmf_pkg.cmake
deleted file mode 100644
index 0fde0926976cc58e9bea0672ffb610a635def3de..0000000000000000000000000000000000000000
--- a/CMake/cdat_modules/esmf_pkg.cmake
+++ /dev/null
@@ -1,57 +0,0 @@
-include(CMakeDependentOption)
-
-set(ESMF_MAJOR 6)
-set(ESMF_MINOR 3)
-set(ESMF_PATCH 0rp1)
-set(ESMP_MAJOR 01)
-set(ESMF_VERSION ${ESMF_MAJOR}_${ESMF_MINOR}_${ESMF_PATCH})
-set(ESMF_URL ${LLNL_URL})
-set(ESMF_GZ esmp.ESMF_${ESMF_VERSION}_ESMP_${ESMP_MAJOR}.tar.bz2)
-set(ESMF_MD5  a9be4fb51da1bc1fab027137297c5030 )
-set(ESMF_SOURCE ${ESMF_URL}/${ESMF_GZ})
-
-if (CDAT_BUILD_LEAN)
-    option(CDAT_BUILD_ESMF_ESMP "Build python version Earth System Modeling Framework" OFF)
-else ()
-    option(CDAT_BUILD_ESMF_ESMP "Build python version Earth System Modeling Framework" ON)
-endif()
-
-cmake_dependent_option(CDAT_BUILD_ESMF_PARALLEL
-  "Build parallel version of Earth System Modeling Framework library" ON
-  "CDAT_BUILD_PARALLEL" OFF
-)
-
-set(TXCMAKE_DIR ${cdat_SOURCE_DIR}/contrib/sciMake)
-include(${TXCMAKE_DIR}/sciFuncsMacros.cmake)
-include(${TXCMAKE_DIR}/sciFortranChecks.cmake)
-
-if("${CMAKE_Fortran_COMPILER_ID}" STREQUAL GNU)
-  string(REGEX MATCHALL "[0-9]+\\." test_version_list ${Fortran_VERSION})
-  string(SUBSTRING ${Fortran_VERSION} 0 3 Fortran_MAJOR_VERSION)
-  LIST(GET test_version_list 0 Fortran_MAJOR_VERSION)
-  LIST(GET test_version_list 1 Fortran_MINOR_VERSION)
-else()
-  set(Fortran_MINOR_VERSION "")
-endif()
-
-if(CDAT_BUILD_ESMF_ESMP)
-    if("${CMAKE_Fortran_COMPILER_ID}" STREQUAL GNU)
-      # GNU gfortran must be >= 4.3 last 4.2 gfortran release was 4.2.4
-      if(${Fortran_VERSION} VERSION_GREATER "4.2.9" AND ${Fortran_VERSION} VERSION_LESS "5.2.2")
-        ## On APPLE need to test for -arch as well!
-        add_cdat_package(ESMF "" "Build ESMF" ON)
-      else()
-        message(FATAL_ERROR "[ERROR] gfortran must be 4.3 <= version < 5.2.2; you have ${Fortran_VERSION}")
-      endif()
-    else()
-      add_cdat_package(ESMF "" "Build ESMF" ON)
-      message("[INFO] Fortran Compiler is: ${CMAKE_Fortran_COMPILER}")
-    endif()
-
-     # the following may need to be adjusted on Crays, otherwise the defaults will likely apply
-     set(CDAT_BUILD_ESMF_OS "${CMAKE_SYSTEM_NAME}" CACHE STRING "ESMF_OS env variable, may need to change to Unicos on Crays")
-     set(CDAT_BUILD_ESMF_COMPILER "gfortran" CACHE STRING "ESMF_COMPILER env variable, choices are gfortran, intel, pgi, g95, or nag")
-     set(CDAT_BUILD_ESMF_COMM "openmpi" CACHE STRING "ESMF_COMM env variable, choices are openmpi, mpiuni, mpi, mpich2, or mvapich2")
-     set(CDAT_BUILD_ESMF_ABI "64" CACHE STRING "ESMF_ABI env variable, choices are 32 or 64")
-endif()
-
diff --git a/CMake/cdat_modules/ezget_deps.cmake b/CMake/cdat_modules/ezget_deps.cmake
deleted file mode 100644
index e859d355d626c7e20b0ab50d9755dd7bd6c15023..0000000000000000000000000000000000000000
--- a/CMake/cdat_modules/ezget_deps.cmake
+++ /dev/null
@@ -1 +0,0 @@
-set(ezget_deps ${netcdf_pkg} ${libdrs_pkg} ${libcdms_pkg})
diff --git a/CMake/cdat_modules/ezget_external.cmake b/CMake/cdat_modules/ezget_external.cmake
deleted file mode 100644
index 078bebda05a9901f89fdc870db9413f73cdce7a1..0000000000000000000000000000000000000000
--- a/CMake/cdat_modules/ezget_external.cmake
+++ /dev/null
@@ -1,43 +0,0 @@
-set(ezget_source "${CMAKE_CURRENT_BINARY_DIR}/build/ezget")
-set(ezget_install "${cdat_EXTERNALS}")
-
-configure_file(
-    ${cdat_CMAKE_SOURCE_DIR}/cdat_modules_extra/ezget_Makefile.gfortran.in
-    ${CMAKE_CURRENT_BINARY_DIR}/CMake/ezget_Makefile
-    )
-
-if(DEFINED GIT_CMD_STR_EZGET )
-    message("[INFO] [ezget] Installing ${nm} from ${GIT_CMD_STR_EZGET}")
-    include(GetGitRevisionDescription)
-    set(URL_STR )
-    set(URL_MD5_STR )
-else()
-    message("[INFO] [ezget] Installed ${nm} from tarball ${EZGET_GZ}")
-    set(URL_STR URL ${EZGET_URL}/${EZGET_GZ})
-    set(URL_MD5_STR URL_MD5 ${EZGET_MD5})
-    set(GIT_CMD_STR_EZGET )
-    set(GIT_TAG )
-endif()
-set(EZGET_MAKE_ARGS -f^^${CMAKE_CURRENT_BINARY_DIR}/CMake/ezget_Makefile)
-set(EZGET_MAKE_INSTALL_ARGS -f^^${CMAKE_CURRENT_BINARY_DIR}/CMake/ezget_Makefile^^install)
-set(EZGET_BUILD_ARGS -fPIC)
-
-ExternalProject_Add(ezget
-  LIST_SEPARATOR ^^
-  DOWNLOAD_DIR ${CDAT_PACKAGE_CACHE_DIR}
-  SOURCE_DIR ${ezget_source}
-  INSTALL_DIR ${ezget_install}
-  ${URL_STR}
-  ${URL_MD5_STR}
-  ${GIT_CMD_STR_EZGET}
-  ${GIT_TAG}
-  BUILD_IN_SOURCE 1
-  CONFIGURE_COMMAND ""
-  BUILD_COMMAND ${CMAKE_COMMAND} -DADDITIONAL_CFLAGS=${EZGET_BUILD_ARGS} -Dmake=$(MAKE) -DBUILD_ARGS=${EZGET_MAKE_ARGS} -DWORKING_DIR=<SOURCE_DIR> -P ${cdat_CMAKE_BINARY_DIR}/cdat_make_step.cmake
-  INSTALL_COMMAND ${CMAKE_COMMAND} -DADDITIONAL_CFLAGS=${EZGET_BUILD_ARGS} -Dmake=$(MAKE) -DBUILD_ARGS=${EZGET_MAKE_INSTALL_ARGS} -DWORKING_DIR=<SOURCE_DIR> -P ${cdat_CMAKE_BINARY_DIR}/cdat_make_step.cmake
-  DEPENDS ${ezget_deps}
-  ${ep_log_options}
-)
-if (DEFINED GIT_CMD_STR_EZGET)
-  unset(GIT_CMD_STR_EZGET)
-endif()
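These external modules share one mechanism worth spelling out: list-valued arguments are joined with ^^ and ExternalProject_Add is told LIST_SEPARATOR ^^, so each ^^ is rewritten to a literal ; in the generated command line, and the -P driver script receives BUILD_ARGS as a genuine CMake list. A minimal self-contained sketch, where driver.cmake is a hypothetical stand-in for cdat_make_step.cmake (which is outside this diff):

    include(ExternalProject)
    set(MY_MAKE_ARGS -f^^${CMAKE_CURRENT_BINARY_DIR}/My_Makefile^^install)
    ExternalProject_Add(mypkg
      URL http://example.invalid/mypkg-1.0.tar.gz   # hypothetical tarball
      LIST_SEPARATOR ^^                             # ^^ -> ; at command time
      CONFIGURE_COMMAND ""
      BUILD_COMMAND ${CMAKE_COMMAND} -DBUILD_ARGS=${MY_MAKE_ARGS}
                    -P ${CMAKE_CURRENT_BINARY_DIR}/driver.cmake
      INSTALL_COMMAND ""
    )
    # driver.cmake then sees BUILD_ARGS as "-f;/.../My_Makefile;install" and can
    # expand it argument-by-argument: execute_process(COMMAND make ${BUILD_ARGS})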
diff --git a/CMake/cdat_modules/ezget_pkg.cmake b/CMake/cdat_modules/ezget_pkg.cmake
deleted file mode 100644
index a18d67cfd172abb802366d26a01ad2c9632d55ac..0000000000000000000000000000000000000000
--- a/CMake/cdat_modules/ezget_pkg.cmake
+++ /dev/null
@@ -1,10 +0,0 @@
-set(EZGET_VERSION 1.0.0)
-set(EZGET_BRANCH master)
-set(EZGET_REPOSITORY ${GIT_PROTOCOL}github.com/UV-CDAT/EzGet.git )
-
-set(GIT_CMD_STR_EZGET GIT_REPOSITORY ${EZGET_REPOSITORY})
-set(GIT_TAG GIT_TAG "${EZGET_BRANCH}")
-
-if (CDAT_BUILD_PCMDI)
-  add_cdat_package(ezget "" "" ON)
-endif()
diff --git a/CMake/cdat_modules/ffi_deps.cmake b/CMake/cdat_modules/ffi_deps.cmake
deleted file mode 100644
index 548c543fe56c7e25135f720904180f50ff4329bd..0000000000000000000000000000000000000000
--- a/CMake/cdat_modules/ffi_deps.cmake
+++ /dev/null
@@ -1 +0,0 @@
-set(FFI_deps ${pip_pkg})
diff --git a/CMake/cdat_modules/ffi_external.cmake b/CMake/cdat_modules/ffi_external.cmake
deleted file mode 100644
index df33e73ac2c7a08c80d18cd01b1b33c8d5427fe9..0000000000000000000000000000000000000000
--- a/CMake/cdat_modules/ffi_external.cmake
+++ /dev/null
@@ -1,16 +0,0 @@
-
-set(ffi_source "${CMAKE_CURRENT_BINARY_DIR}/build/ffi-${FFI_MAJOR}.${FFI_MINOR}.${FFI_PATCH}")
-set(ffi_install "${cdat_EXTERNALS}")
-
-ExternalProject_Add(FFI
-  DOWNLOAD_DIR ${CDAT_PACKAGE_CACHE_DIR}
-  SOURCE_DIR ${ffi_source}
-  INSTALL_DIR ${ffi_install}
-  URL ${FFI_URL}/${FFI_BZ2}
-  URL_MD5 ${FFI_MD5}
-  BUILD_IN_SOURCE 1
-  PATCH_COMMAND ""
-  CONFIGURE_COMMAND ${CMAKE_COMMAND} -DINSTALL_DIR=<INSTALL_DIR> -DWORKING_DIR=<SOURCE_DIR> -P ${cdat_CMAKE_BINARY_DIR}/cdat_configure_step.cmake
-  DEPENDS ${FFI_deps}
-  ${ep_log_options}
-)
diff --git a/CMake/cdat_modules/ffi_pkg.cmake b/CMake/cdat_modules/ffi_pkg.cmake
deleted file mode 100644
index e7f0152d24c210dbddaffff6eea2ed4ab59daea0..0000000000000000000000000000000000000000
--- a/CMake/cdat_modules/ffi_pkg.cmake
+++ /dev/null
@@ -1,13 +0,0 @@
-set( FFI_MAJOR 3  )
-set( FFI_MINOR 1  )
-set( FFI_PATCH 5  )
-set(FFI_URL ${LLNL_URL})
-set(FFI_BZ2 libffi-${FFI_MAJOR}.${FFI_MINOR}.tar.gz)
-set(FFI_MD5 f5898b29bbfd70502831a212d9249d10)
-
-set (nm FFI)
-string(TOUPPER ${nm} uc_nm)
-set(${uc_nm}_VERSION ${${nm}_MAJOR}.${${nm}_MINOR})
-set(FFI_SOURCE ${FFI_URL}/${FFI_BZ2})
-
-add_cdat_package_dependent(FFI "" "" ON "CDAT_BUILD_GRAPHICS" OFF)
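A note on the idiom every *_pkg.cmake here uses: nm holds a short package prefix and nested ${${nm}_MAJOR} dereferences assemble the versioned variable names (in this file string(TOUPPER) is a no-op since nm is already upper-case; elsewhere it matters). Note in passing that FFI_BZ2 above actually names a .tar.gz, and FFI_PATCH never reaches the filename. A standalone sketch of the indirection:

    set(FFI_MAJOR 3)
    set(FFI_MINOR 1)
    set(nm FFI)
    string(TOUPPER ${nm} uc_nm)                      # uc_nm = "FFI"
    # the inner ${nm} expands first, so this reads FFI_MAJOR / FFI_MINOR:
    set(${uc_nm}_VERSION ${${nm}_MAJOR}.${${nm}_MINOR})
    message(STATUS "FFI_VERSION = ${FFI_VERSION}")   # prints 3.1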
diff --git a/CMake/cdat_modules/ffmpeg_deps.cmake b/CMake/cdat_modules/ffmpeg_deps.cmake
deleted file mode 100644
index b9278168421e38d5d9b5e0b7fd655f9bc67456b6..0000000000000000000000000000000000000000
--- a/CMake/cdat_modules/ffmpeg_deps.cmake
+++ /dev/null
@@ -1 +0,0 @@
-set(FFMPEG_deps ${pkgconfig_pkg} ${x264_pkg})
diff --git a/CMake/cdat_modules/ffmpeg_external.cmake b/CMake/cdat_modules/ffmpeg_external.cmake
deleted file mode 100644
index 1a2fe723ab03f7bc50e5885f5916cc70a0ebc986..0000000000000000000000000000000000000000
--- a/CMake/cdat_modules/ffmpeg_external.cmake
+++ /dev/null
@@ -1,32 +0,0 @@
-# The FFMPEG external project for ParaView
-set(ffmpeg_source "${CMAKE_CURRENT_BINARY_DIR}/build/FFMPEG")
-set(ffmpeg_install "${cdat_EXTERNALS}")
-set(ENV{PATH} $ENV{PATH}:${cdat_EXTERNALS}/bin)
-
-find_program(YASM_BIN "yasm")
-
-if (NOT YASM_BIN)
-  set(ffmpeg_conf_args --disable-yasm^^--enable-gpl^^--enable-libx264^^--extra-cxxflags=${ffmpeg_source}^^--enable-shared^^--enable-zlib)
-else()
-  set(ffmpeg_conf_args --enable-gpl^^--enable-libx264^^--extra-cxxflags=${ffmpeg_source}^^--enable-shared^^--enable-zlib)
-endif()
-
-ExternalProject_Add(FFMPEG
-  LIST_SEPARATOR ^^
-  DOWNLOAD_DIR ${CDAT_PACKAGE_CACHE_DIR}
-  SOURCE_DIR ${ffmpeg_source}
-  INSTALL_DIR ${ffmpeg_install}
-  URL ${FFMPEG_URL}/${FFMPEG_GZ}
-  URL_MD5 ${FFMPEG_MD5}
-  BUILD_IN_SOURCE 1
-  PATCH_COMMAND ""
-  CONFIGURE_COMMAND ${CMAKE_COMMAND} -DINSTALL_DIR=<INSTALL_DIR> -DWORKING_DIR=<SOURCE_DIR> -DCONFIGURE_ARGS=${ffmpeg_conf_args} -P ${cdat_CMAKE_BINARY_DIR}/cdat_configure_step.cmake
-  DEPENDS ${FFMPEG_deps}
-  ${ep_log_options}
-  )
-
-set(FFMPEG_INCLUDE_DIR ${ffmpeg_install}/include)
-set(FFMPEG_avcodec_LIBRARY ${ffmpeg_install}/lib/libavcodec${_LINK_LIBRARY_SUFFIX})
-set(FFMPEG_avformat_LIBRARY ${ffmpeg_install}/lib/libavformat${_LINK_LIBRARY_SUFFIX})
-set(FFMPEG_avutil_LIBRARY ${ffmpeg_install}/lib/libavutil${_LINK_LIBRARY_SUFFIX})
-set(FFMPEG_swscale_LIBRARY ${ffmpeg_install}/lib/libswscale${_LINK_LIBRARY_SUFFIX})
diff --git a/CMake/cdat_modules/ffmpeg_pkg.cmake b/CMake/cdat_modules/ffmpeg_pkg.cmake
deleted file mode 100644
index 65db2986554728866a8bccc0da0ae920072682dc..0000000000000000000000000000000000000000
--- a/CMake/cdat_modules/ffmpeg_pkg.cmake
+++ /dev/null
@@ -1,14 +0,0 @@
-set(FFMPEG_MAJOR_SRC 2)
-set(FFMPEG_MINOR_SRC 7)
-set(FFMPEG_PATCH_SRC 0)
-set(FFMPEG_URL ${LLNL_URL})
-set(FFMPEG_GZ ffmpeg-${FFMPEG_MAJOR_SRC}.${FFMPEG_MINOR_SRC}.tar.gz)
-set(FFMPEG_MD5 3ad0554981faf2c6deef23a1cd4c8c57)
-
-set (nm FFMPEG)
-string(TOUPPER ${nm} uc_nm)
-set(${uc_nm}_VERSION ${${nm}_MAJOR_SRC}.${${nm}_MINOR_SRC})
-set(FFMPEG_SOURCE ${FFMPEG_URL}/${FFMPEG_GZ})
-set(FFMPEG_ROOT ${cdat_EXTERNALS} CACHE PATH "Path to FFMPEG root directory")
-
-add_cdat_package_dependent(FFMPEG "" "" ON "CDAT_BUILD_GRAPHICS" OFF) 
diff --git a/CMake/cdat_modules/fiona_deps.cmake b/CMake/cdat_modules/fiona_deps.cmake
deleted file mode 100644
index 624113df208b4585287a55c19994e8c67f402062..0000000000000000000000000000000000000000
--- a/CMake/cdat_modules/fiona_deps.cmake
+++ /dev/null
@@ -1 +0,0 @@
-set(Fiona_deps ${pkgconfig_pkg} ${pip_pkg} ${python_pkg} ${shapely_pkg} ${gdal_pkg} ${cligj_pkg})
diff --git a/CMake/cdat_modules/fiona_external.cmake b/CMake/cdat_modules/fiona_external.cmake
deleted file mode 100644
index 4d7e45c7598a7a5bb952606fc5bb66bd0ff8411c..0000000000000000000000000000000000000000
--- a/CMake/cdat_modules/fiona_external.cmake
+++ /dev/null
@@ -1,6 +0,0 @@
-# create an external project to install Fiona
-# and configure and build it
-set(nm Fiona)
-set(USR_ENVS "GDAL_CONFIG=${cdat_EXTERNALS}/bin/gdal-config")
-#set(USER_BUILD_EXT_OPTS "build_ext -I${cdat_EXTERNALS}/include -L${cdat_EXTERNALS}/lib -lgdal")
-include(pipinstaller)
diff --git a/CMake/cdat_modules/fiona_pkg.cmake b/CMake/cdat_modules/fiona_pkg.cmake
deleted file mode 100644
index 1cd9024343b94bc32dfb6e6698d6d3c380a7cb7c..0000000000000000000000000000000000000000
--- a/CMake/cdat_modules/fiona_pkg.cmake
+++ /dev/null
@@ -1,17 +0,0 @@
-set( FIONA_MAJOR_SRC 1  )
-set( FIONA_MINOR_SRC 6 )
-set( FIONA_PATCH_SRC 0  )
-set(FIONA_URL ${LLNL_URL})
-set(FIONA_GZ
-    Fiona-${FIONA_MAJOR_SRC}.${FIONA_MINOR_SRC}.${FIONA_PATCH_SRC}.tar.gz)
-set(FIONA_MD5 40f945898c550721db715f69658cf7e9 )
-set(FIONA_SOURCE ${FIONA_URL}/${FIONA_GZ})
-
-set (nm FIONA)
-string(TOUPPER ${nm} uc_nm)
-set(${uc_nm}_VERSION ${${nm}_MAJOR_SRC}.${${nm}_MINOR_SRC}.${${nm}_PATCH_SRC})
-if (CDAT_BUILD_ALL)
-  add_cdat_package(Fiona "" "" ON)
-else()
-  add_cdat_package(Fiona "" "" OFF)
-endif()
diff --git a/CMake/cdat_modules/flake8_deps.cmake b/CMake/cdat_modules/flake8_deps.cmake
deleted file mode 100644
index 490185ec02f125c444703faf1ac8e88c00e44c27..0000000000000000000000000000000000000000
--- a/CMake/cdat_modules/flake8_deps.cmake
+++ /dev/null
@@ -1 +0,0 @@
-set(flake8_deps ${python_pkg} ${setuptools_pkg} ${pyflakes_pkg} ${pep8_pkg} ${mccabe_pkg})
diff --git a/CMake/cdat_modules/flake8_external.cmake b/CMake/cdat_modules/flake8_external.cmake
deleted file mode 100644
index 5f05cb3f85f921b8b0d0c5ce35c2fdbc55ed6319..0000000000000000000000000000000000000000
--- a/CMake/cdat_modules/flake8_external.cmake
+++ /dev/null
@@ -1,19 +0,0 @@
-ExternalProject_Add(flake8
-  DOWNLOAD_DIR "${CMAKE_CURRENT_BINARY_DIR}"
-  SOURCE_DIR "${CMAKE_CURRENT_BINARY_DIR}/build/flake8"
-  URL "${FLAKE8_SOURCE}"
-  URL_MD5 ${FLAKE8_MD5}
-  BUILD_IN_SOURCE 1
-  CONFIGURE_COMMAND ""
-  BUILD_COMMAND "${PYTHON_EXECUTABLE}" setup.py build
-  INSTALL_COMMAND "${PYTHON_EXECUTABLE}" setup.py install "${PYTHON_EXTRA_PREFIX}"
-  DEPENDS ${flake8_deps}
-  ${ep_log_options}
-  )
-
-if (APPLE)
-  set(FLAKE8_EXECUTABLE
-    "${CMAKE_INSTALL_PREFIX}/Library/Frameworks/Python.framework/Versions/${PYVER}/bin/flake8")
-else()
-  set(FLAKE8_EXECUTABLE "${CMAKE_INSTALL_PREFIX}/bin/flake8")
-endif()
diff --git a/CMake/cdat_modules/flake8_pkg.cmake b/CMake/cdat_modules/flake8_pkg.cmake
deleted file mode 100644
index f10ebf053d47a48dc50f2833257b97299169a3f5..0000000000000000000000000000000000000000
--- a/CMake/cdat_modules/flake8_pkg.cmake
+++ /dev/null
@@ -1,17 +0,0 @@
-set(nm flake8)
-string(TOUPPER ${nm} uc_nm)
-
-set(${uc_nm}_MAJOR 2)
-set(${uc_nm}_MINOR 4)
-set(${uc_nm}_PATCH 1)
-set(${uc_nm}_VERSION ${${uc_nm}_MAJOR}.${${uc_nm}_MINOR}.${${uc_nm}_PATCH})
-set(${uc_nm}_URL ${LLNL_URL})
-set(${uc_nm}_GZ ${nm}-${${uc_nm}_VERSION}.tar.gz)
-set(${uc_nm}_MD5 ed45d3db81a3b7c88bd63c6e37ca1d65)
-
-set(${uc_nm}_VERSION ${${uc_nm}_MAJOR}.${${uc_nm}_MINOR}.${${uc_nm}_PATCH})
-set(${uc_nm}_SOURCE ${${uc_nm}_URL}/${${uc_nm}_GZ})
-
-if(BUILD_TESTING)
-  add_cdat_package(${nm} "" "" ON)
-endif()
diff --git a/CMake/cdat_modules/fontconfig_deps.cmake b/CMake/cdat_modules/fontconfig_deps.cmake
deleted file mode 100644
index 87455d1355c81850422176fb1c960fa1ab4d7424..0000000000000000000000000000000000000000
--- a/CMake/cdat_modules/fontconfig_deps.cmake
+++ /dev/null
@@ -1 +0,0 @@
-set(fontconfig_deps ${pkgconfig_pkg} ${libxml2_pkg} ${freetype_pkg})
diff --git a/CMake/cdat_modules/fontconfig_external.cmake b/CMake/cdat_modules/fontconfig_external.cmake
deleted file mode 100644
index fa57bc888ee6794d4cbf724f35ff21bc360c9d73..0000000000000000000000000000000000000000
--- a/CMake/cdat_modules/fontconfig_external.cmake
+++ /dev/null
@@ -1,18 +0,0 @@
-include(@cdat_CMAKE_BINARY_DIR@/cdat_common_environment.cmake)
-
-set(fontconfig_source "${CMAKE_CURRENT_BINARY_DIR}/build/fontconfig")
-set(fontconfig_install "${cdat_EXTERNALS}")
-
-ExternalProject_Add(fontconfig
-  LIST_SEPARATOR ^^
-  DOWNLOAD_DIR ${CDAT_PACKAGE_CACHE_DIR}
-  SOURCE_DIR ${fontconfig_source}
-  INSTALL_DIR ${fontconfig_install}
-  URL ${FTCFG_URL}/${FTCFG_GZ}
-  URL_MD5 ${FTCFG_MD5}
-  BUILD_IN_SOURCE 1
-  PATCH_COMMAND ""
-  CONFIGURE_COMMAND ${CMAKE_COMMAND} -DINSTALL_DIR=<INSTALL_DIR> -DWORKING_DIR=<SOURCE_DIR> -DCONFIGURE_ARGS=--disable-docs^^--enable-libxml2 -P ${cdat_CMAKE_BINARY_DIR}/cdat_configure_step.cmake
-  DEPENDS ${fontconfig_deps}
-  ${ep_log_options}
-)
diff --git a/CMake/cdat_modules/fontconfig_pkg.cmake b/CMake/cdat_modules/fontconfig_pkg.cmake
deleted file mode 100644
index 9598115827f36ffad37816a9d54b929d8abc130f..0000000000000000000000000000000000000000
--- a/CMake/cdat_modules/fontconfig_pkg.cmake
+++ /dev/null
@@ -1,17 +0,0 @@
-set(FTCFG_MAJOR 2)
-set(FTCFG_MINOR 4)
-set(FTCFG_PATCH 2)
-set(FTCFG_MAJOR_SRC 2)
-set(FTCFG_MINOR_SRC 10)
-set(FTCFG_PATCH_SRC 1)
-set(FTCFG_URL ${LLNL_URL})
-set(FTCFG_GZ fontconfig-${FTCFG_MAJOR_SRC}.${FTCFG_MINOR_SRC}.${FTCFG_PATCH_SRC}.tar.gz)
-set(FTCFG_MD5 43808dd9153cff1c3ac302e94e024814)
-
-set (nm FTCFG)
-string(TOUPPER ${nm} uc_nm)
-set(${uc_nm}_VERSION ${${nm}_MAJOR_SRC}.${${nm}_MINOR_SRC}.${${nm}_PATCH_SRC})
-set(FONTCONFIG_VERSION ${FTCFG_VERSION})
-set(FONTCONFIG_SOURCE ${FTCFG_URL}/${FTCFG_GZ})
-
-add_cdat_package_dependent(fontconfig "" "" OFF "CDAT_BUILD_GRAPHICS" OFF)
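add_cdat_package and add_cdat_package_dependent are project helpers whose definitions sit outside this diff; read from their call sites, the positional arguments appear to be (name, version, description, default) plus, for the _dependent form, a gating expression and the value used when the gate is false. That reading is an inference from usage, not documented API:

    # inferred shape, based only on the calls in these files:
    #   add_cdat_package(<name> <version> <description> <default ON/OFF>)
    #   add_cdat_package_dependent(<name> <version> <description> <default>
    #                              <gate expression> <value when gate is false>)
    add_cdat_package_dependent(fontconfig "" "" OFF "CDAT_BUILD_GRAPHICS" OFF)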
diff --git a/CMake/cdat_modules/freetype_deps.cmake b/CMake/cdat_modules/freetype_deps.cmake
deleted file mode 100644
index 6d451c65ff5329e0eac525439fe73dba8f32ddea..0000000000000000000000000000000000000000
--- a/CMake/cdat_modules/freetype_deps.cmake
+++ /dev/null
@@ -1 +0,0 @@
-set(freetype_deps ${pkgconfig_pkg} ${png_pkg})
diff --git a/CMake/cdat_modules/freetype_external.cmake b/CMake/cdat_modules/freetype_external.cmake
deleted file mode 100644
index 3af29439922872d1468ba2bda2633b5e3a287d6f..0000000000000000000000000000000000000000
--- a/CMake/cdat_modules/freetype_external.cmake
+++ /dev/null
@@ -1,24 +0,0 @@
-
-set(freetype_source "${CMAKE_CURRENT_BINARY_DIR}/build/freetype")
-set(freetype_install "${cdat_EXTERNALS}")
-
-ExternalProject_Add(freetype
-  DOWNLOAD_DIR ${CDAT_PACKAGE_CACHE_DIR}
-  SOURCE_DIR ${freetype_source}
-  INSTALL_DIR ${freetype_install}
-  URL ${FT_URL}/${FT_GZ}
-  URL_MD5 ${FT_MD5}
-  BUILD_IN_SOURCE 1
-  PATCH_COMMAND ""
-  CONFIGURE_COMMAND ${CMAKE_COMMAND} -DINSTALL_DIR=<INSTALL_DIR> -DWORKING_DIR=<SOURCE_DIR> -P ${cdat_CMAKE_BINARY_DIR}/cdat_configure_step.cmake
-  DEPENDS ${freetype_deps}
-  ${ep_log_options}
-)
-
-#ln -sf @EXTERNALS@/include/freetype2/freetype @EXTERNALS@/include/freetype
-
-ExternalProject_Add_Step(freetype symlink
-  COMMAND ${CMAKE_COMMAND} -E create_symlink ${cdat_EXTERNALS}/include/freetype2/freetype ${cdat_EXTERNALS}/include/freetype
-  COMMENT "Symlink include/freetype2/freetype include directory as include/freetype"
-  DEPENDEES install
-)
diff --git a/CMake/cdat_modules/freetype_pkg.cmake b/CMake/cdat_modules/freetype_pkg.cmake
deleted file mode 100644
index 596ce205a6b1f7f13a3a4745c58adbc56c1aebff..0000000000000000000000000000000000000000
--- a/CMake/cdat_modules/freetype_pkg.cmake
+++ /dev/null
@@ -1,18 +0,0 @@
-set(FT_MAJOR 9)
-set(FT_MINOR 7)
-set(FT_PATCH 3)
-set(FT_MAJOR_SRC 2)
-set(FT_MINOR_SRC 4)
-set(FT_PATCH_SRC 10)
-set(FT_URL ${LLNL_URL})
-set(FT_GZ freetype-${FT_MAJOR_SRC}.${FT_MINOR_SRC}.${FT_PATCH_SRC}.tar.gz)
-set(FT_MD5 4b1887901730ff2e12562ef30fa521d5)
-
-set (nm FT)
-string(TOUPPER ${nm} uc_nm)
-set(${uc_nm}_VERSION ${${nm}_MAJOR_SRC}.${${nm}_MINOR_SRC}.${${nm}_PATCH_SRC})
-set(FREETYPE_VERSION ${FT_VERSION})
-set(FREETYPE_SOURCE ${FT_URL}/${FT_GZ})
-
-
-add_cdat_package_dependent(freetype "" "" OFF "CDAT_BUILD_GRAPHICS" OFF)
diff --git a/CMake/cdat_modules/g2clib_deps.cmake b/CMake/cdat_modules/g2clib_deps.cmake
deleted file mode 100644
index a2994c83222c225483028735ca0435bb64643e60..0000000000000000000000000000000000000000
--- a/CMake/cdat_modules/g2clib_deps.cmake
+++ /dev/null
@@ -1 +0,0 @@
-set(g2clib_deps ${pkgconfig_pkg} ${png_pkg} ${zlib_pkg} ${jasper_pkg})
diff --git a/CMake/cdat_modules/g2clib_external.cmake b/CMake/cdat_modules/g2clib_external.cmake
deleted file mode 100644
index 5a1406979b6acd00de0017ff8216bef60d1e261e..0000000000000000000000000000000000000000
--- a/CMake/cdat_modules/g2clib_external.cmake
+++ /dev/null
@@ -1,18 +0,0 @@
-
-set(g2clib_source "${CMAKE_CURRENT_BINARY_DIR}/build/g2clib")
-set(g2clib_install "${cdat_EXTERNALS}")
-
-ExternalProject_Add(g2clib
-  DOWNLOAD_DIR ${CDAT_PACKAGE_CACHE_DIR}
-  SOURCE_DIR ${g2clib_source}
-  INSTALL_DIR ${g2clib_install}
-  URL ${G2CLIB_URL}/${G2CLIB_GZ}
-  URL_MD5 ${G2CLIB_MD5}
-  BUILD_IN_SOURCE 1
-  PATCH_COMMAND ""
-  CONFIGURE_COMMAND ""
-  BUILD_COMMAND ${CMAKE_COMMAND} -Dmake=$(MAKE) -DWORKING_DIR=<SOURCE_DIR> -P ${cdat_CMAKE_BINARY_DIR}/cdat_make_step.cmake
-  INSTALL_COMMAND ${CMAKE_COMMAND} -DWORKING_DIR=<SOURCE_DIR> -P ${cdat_CMAKE_BINARY_DIR}/cdat_install_step.cmake
-  DEPENDS ${g2clib_deps}
-  ${ep_log_options}
-)
diff --git a/CMake/cdat_modules/g2clib_pkg.cmake b/CMake/cdat_modules/g2clib_pkg.cmake
deleted file mode 100644
index 41580b8eb898443a546331acdfbb78a7b04ace2f..0000000000000000000000000000000000000000
--- a/CMake/cdat_modules/g2clib_pkg.cmake
+++ /dev/null
@@ -1,11 +0,0 @@
-set(G2CLIB_MAJOR 1)
-set(G2CLIB_MINOR 4)
-set(G2CLIB_PATCH 0b)
-set(G2CLIB_VERSION ${G2CLIB_MAJOR}.${G2CLIB_MINOR}.${G2CLIB_PATCH})
-set(G2CLIB_URL ${LLNL_URL})
-set(G2CLIB_GZ g2clib-${G2CLIB_VERSION}.tar.gz)
-set(G2CLIB_MD5 72378d980b2f4d6b09fd86e23e884a4b)
-set(G2CLIB_SOURCE ${G2CLIB_URL}/${G2CLIB_GZ})
-
-
-add_cdat_package(g2clib "" "" ON)
diff --git a/CMake/cdat_modules/gdal_deps.cmake b/CMake/cdat_modules/gdal_deps.cmake
deleted file mode 100644
index 3fbc8ce4d11b75397f4ea116a678d079e8d2d89b..0000000000000000000000000000000000000000
--- a/CMake/cdat_modules/gdal_deps.cmake
+++ /dev/null
@@ -1 +0,0 @@
-set(gdal_deps ${pkgconfig_pkg} ${python_pkg} ${uuid_pkg} ${netcdf_pkg} ${hdf5_pkg} ${curl_pkg} ${zlib_pkg} ${jasper_pkg} ${geos_pkg} ${setuptools_pkg} ${proj4_pkg})
diff --git a/CMake/cdat_modules/gdal_external.cmake b/CMake/cdat_modules/gdal_external.cmake
deleted file mode 100644
index 33e4c8e60a4a440f6fdb9ab4227213cd49b83644..0000000000000000000000000000000000000000
--- a/CMake/cdat_modules/gdal_external.cmake
+++ /dev/null
@@ -1,25 +0,0 @@
-set(gdal_source "${CMAKE_CURRENT_BINARY_DIR}/build/gdal")
-set(gdal_install "${cdat_EXTERNALS}")
-set(gdal_configure_args "--prefix=${cdat_EXTERNALS}^^--with-hdf5=${cdat_EXTERNALS}^^--with-netcdf=${cdat_EXTERNALS}^^--with-curl=${cdat_EXTERNALS}^^--with-geos=${cdat_EXTERNALS}/bin/geos-config^^--with-python=${PYTHON_EXECUTABLE}^^--with-jpeg=no^^--with-libtiff=internal^^--without-jpeg12^^--with-geotiff=internal^^--with-static-proj4=${cdat_EXTERNALS}/proj4")
-
-if (CDAT_BUILD_PARALLEL)
-  set(configure_file "cdatmpi_configure_step.cmake")
-else()
-  set(configure_file "cdat_configure_step.cmake")
-endif()
-message("[GDAL] CONF FILE IS:"${configure_file})
-ExternalProject_Add(gdal
-  LIST_SEPARATOR ^^
-  DOWNLOAD_DIR ${CDAT_PACKAGE_CACHE_DIR}
-  SOURCE_DIR ${gdal_source}
-  INSTALL_DIR ${gdal_install}
-  URL ${GDAL_URL}/${GDAL_GZ}
-  URL_MD5 ${GDAL_MD5}
-  BUILD_IN_SOURCE 1
-  PATCH_COMMAND ""
-  CONFIGURE_COMMAND ${CMAKE_COMMAND} -DINSTALL_DIR=<INSTALL_DIR> -DWORKING_DIR=<SOURCE_DIR> -DCONFIGURE_ARGS=${gdal_configure_args} -P ${cdat_CMAKE_BINARY_DIR}/${configure_file}
-  BUILD_COMMAND ${CMAKE_COMMAND} -Dmake=$(MAKE) -DWORKING_DIR=<SOURCE_DIR> -P ${cdat_CMAKE_BINARY_DIR}/cdat_make_step.cmake
-  INSTALL_COMMAND ${CMAKE_COMMAND} -DWORKING_DIR=<SOURCE_DIR> -P ${cdat_CMAKE_BINARY_DIR}/cdat_install_step.cmake
-  DEPENDS "${gdal_deps}"
-  ${ep_log_options}
-)
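cdat_configure_step.cmake (and its MPI twin) are referenced throughout but are not part of this diff. Under the assumption that they simply run the package's ./configure inside WORKING_DIR with the forwarded list, a plausible minimal stand-in looks like this; treat every detail as hypothetical:

    # configure_step_sketch.cmake -- hypothetical; invoke as
    #   cmake -DINSTALL_DIR=... -DWORKING_DIR=... -DCONFIGURE_ARGS=a;b;c -P configure_step_sketch.cmake
    execute_process(
      COMMAND ./configure --prefix=${INSTALL_DIR} ${CONFIGURE_ARGS}
      WORKING_DIRECTORY ${WORKING_DIR}
      RESULT_VARIABLE res
    )
    if(NOT res EQUAL 0)
      message(FATAL_ERROR "configure failed: ${res}")
    endif()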
diff --git a/CMake/cdat_modules/gdal_pkg.cmake b/CMake/cdat_modules/gdal_pkg.cmake
deleted file mode 100644
index d8756b2bbf54c88197bedb3897197c9c7393d164..0000000000000000000000000000000000000000
--- a/CMake/cdat_modules/gdal_pkg.cmake
+++ /dev/null
@@ -1,17 +0,0 @@
-set( GDAL_MAJOR 1  )
-set( GDAL_MINOR 11  )
-set( GDAL_PATCH 2  )
-set(GDAL_URL ${LLNL_URL})
-set(GDAL_GZ gdal-${GDAL_MAJOR}.${GDAL_MINOR}.${GDAL_PATCH}.tar.gz)
-set(GDAL_MD5 866a46f72b1feadd60310206439c1a76 )
-
-set (nm GDAL)
-string(TOUPPER ${nm} uc_nm)
-set(${uc_nm}_VERSION ${${nm}_MAJOR}.${${nm}_MINOR}.${${nm}_PATCH})
-set(GDAL_SOURCE ${GDAL_URL}/${GDAL_GZ})
-
-if (CDAT_BUILD_ALL)
-  add_cdat_package(gdal "" "Build the Geospatial Data Abstraction Library (GDAL) and python osgeo module" ON)
-else()
-  add_cdat_package(gdal "" "Build the Geospatial Data Abstraction Library (GDAL) and python osgeo module" OFF)
-endif()
diff --git a/CMake/cdat_modules/geos_deps.cmake b/CMake/cdat_modules/geos_deps.cmake
deleted file mode 100644
index 3b1cbf81b5961552044e27c09c61a7233f043a83..0000000000000000000000000000000000000000
--- a/CMake/cdat_modules/geos_deps.cmake
+++ /dev/null
@@ -1 +0,0 @@
-set(GEOS_deps ${pkgconfig_pkg})
diff --git a/CMake/cdat_modules/geos_external.cmake b/CMake/cdat_modules/geos_external.cmake
deleted file mode 100644
index d7f8e65672c420606d944a13d496ec60979f5cde..0000000000000000000000000000000000000000
--- a/CMake/cdat_modules/geos_external.cmake
+++ /dev/null
@@ -1,16 +0,0 @@
-
-set(geos_source "${CMAKE_CURRENT_BINARY_DIR}/build/geos-${GEOS_MAJOR}.${GEOS_MINOR}.${GEOS_PATCH}")
-set(geos_install "${cdat_EXTERNALS}")
-
-ExternalProject_Add(GEOS
-  DOWNLOAD_DIR ${CDAT_PACKAGE_CACHE_DIR}
-  SOURCE_DIR ${geos_source}
-  INSTALL_DIR ${geos_install}
-  URL ${GEOS_URL}/${GEOS_BZ2}
-  URL_MD5 ${GEOS_MD5}
-  BUILD_IN_SOURCE 1
-  PATCH_COMMAND ""
-  CONFIGURE_COMMAND ${CMAKE_COMMAND} -DINSTALL_DIR=<INSTALL_DIR> -DWORKING_DIR=<SOURCE_DIR> -P ${cdat_CMAKE_BINARY_DIR}/cdat_configure_step.cmake
-  DEPENDS ${GEOS_deps}
-  ${ep_log_options}
-)
diff --git a/CMake/cdat_modules/geos_pkg.cmake b/CMake/cdat_modules/geos_pkg.cmake
deleted file mode 100644
index d2927b020004bed48edd91f63993bd11bc829760..0000000000000000000000000000000000000000
--- a/CMake/cdat_modules/geos_pkg.cmake
+++ /dev/null
@@ -1,13 +0,0 @@
-set( GEOS_MAJOR 3  )
-set( GEOS_MINOR 3  )
-set( GEOS_PATCH 5  )
-set(GEOS_URL ${LLNL_URL})
-set(GEOS_BZ2 geos-${GEOS_MAJOR}.${GEOS_MINOR}.${GEOS_PATCH}.tar.bz2)
-set(GEOS_MD5 2ba61afb7fe2c5ddf642d82d7b16e75b)
-
-set (nm GEOS)
-string(TOUPPER ${nm} uc_nm)
-set(${uc_nm}_VERSION ${${nm}_MAJOR}.${${nm}_MINOR}.${${nm}_PATCH})
-set(GEOS_SOURCE ${GEOS_URL}/${GEOS_BZ2})
-
-add_cdat_package_dependent(GEOS "" "" OFF "CDAT_BUILD_GRAPHICS" OFF)
diff --git a/CMake/cdat_modules/ghostscript_deps.cmake b/CMake/cdat_modules/ghostscript_deps.cmake
deleted file mode 100644
index 0f4bedf9665f99129b0c579cdac07142fd2dd022..0000000000000000000000000000000000000000
--- a/CMake/cdat_modules/ghostscript_deps.cmake
+++ /dev/null
@@ -1 +0,0 @@
-set(ghostscript_deps ${pkgconfig_pkg} ${zlib_pkg} ${jpeg_pkg} ${png_pkg} ${fontconfig_pkg} ${freetype_pkg} ${pixman_pkg} ${libXSLT_pkg} ${libXML2_pkg})
diff --git a/CMake/cdat_modules/ghostscript_external.cmake b/CMake/cdat_modules/ghostscript_external.cmake
deleted file mode 100644
index fc322b66ac2bd60f2efdf407f4565e6997d2f512..0000000000000000000000000000000000000000
--- a/CMake/cdat_modules/ghostscript_external.cmake
+++ /dev/null
@@ -1,19 +0,0 @@
-
-set(ghostscript_source "${CMAKE_CURRENT_BINARY_DIR}/build/ghostscript")
-set(ghostscript_install "${cdat_EXTERNALS}")
-
-set(ghostscripts_args "--with-drivers=PS,BMP --disable-cups")
-
-ExternalProject_Add(ghostscript
-  DOWNLOAD_DIR ${CDAT_PACKAGE_CACHE_DIR}
-  SOURCE_DIR ${ghostscript_source}
-  INSTALL_DIR ${ghostscript_install}
-  URL ${GS_URL}/${GS_GZ}
-  URL_MD5 ${GS_MD5}
-  BUILD_IN_SOURCE 1
-  PATCH_COMMAND ""
-  CONFIGURE_COMMAND ${CMAKE_COMMAND} -DINSTALL_DIR=<INSTALL_DIR> -DWORKING_DIR=<SOURCE_DIR> -DCONFIGURE_ARGS=${ghostscripts_args} -P ${cdat_CMAKE_BINARY_DIR}/cdat_configure_step.cmake
-  BUILD_COMMAND ${CMAKE_COMMAND} -Dmake=$(MAKE) -DBUILD_ARGS=${ghostscript_source} -DWORKING_DIR=<SOURCE_DIR> -P ${cdat_CMAKE_BINARY_DIR}/cdat_make_step.cmake
-  DEPENDS ${ghostscript_deps}
-  ${ep_log_options}
-)
diff --git a/CMake/cdat_modules/ghostscript_pkg.cmake b/CMake/cdat_modules/ghostscript_pkg.cmake
deleted file mode 100644
index d56e90ece1be6316abe484180a7bf28be8f0e4cf..0000000000000000000000000000000000000000
--- a/CMake/cdat_modules/ghostscript_pkg.cmake
+++ /dev/null
@@ -1,2 +0,0 @@
-add_cdat_package(ghostscript "" "" OFF)
-
diff --git a/CMake/cdat_modules/gifsicle_external.cmake b/CMake/cdat_modules/gifsicle_external.cmake
deleted file mode 100644
index 853f5d55a3c0a24362a776d0764675b392f37001..0000000000000000000000000000000000000000
--- a/CMake/cdat_modules/gifsicle_external.cmake
+++ /dev/null
@@ -1,16 +0,0 @@
-
-set(gifsicle_source "${CMAKE_CURRENT_BINARY_DIR}/build/gifsicle")
-set(gifsicle_install "${cdat_EXTERNALS}")
-
-ExternalProject_Add(gifsicle
-  DOWNLOAD_DIR ${CDAT_PACKAGE_CACHE_DIR}
-  SOURCE_DIR ${gifsicle_source}
-  INSTALL_DIR ${gifsicle_install}
-  URL ${GIFSICLE_URL}/${GIFSICLE_GZ}
-  URL_MD5 ${GIFSICLE_MD5}
-  BUILD_IN_SOURCE 1
-  PATCH_COMMAND ""
-  CONFIGURE_COMMAND ${CMAKE_COMMAND} -DINSTALL_DIR=<INSTALL_DIR> -DWORKING_DIR=<SOURCE_DIR> -P ${cdat_CMAKE_BINARY_DIR}/cdat_configure_step.cmake
-  DEPENDS ${gifsicle_deps}
-  ${ep_log_options}
-)
diff --git a/CMake/cdat_modules/gsw_deps.cmake b/CMake/cdat_modules/gsw_deps.cmake
deleted file mode 100644
index 9d0b19879008294e24e9b52d8530281c3bc66ce2..0000000000000000000000000000000000000000
--- a/CMake/cdat_modules/gsw_deps.cmake
+++ /dev/null
@@ -1 +0,0 @@
-set(gsw_deps ${python_pkg} ${numpy_pkg})
diff --git a/CMake/cdat_modules/gsw_external.cmake b/CMake/cdat_modules/gsw_external.cmake
deleted file mode 100644
index 24c3c0e58509fed6a9e6f5a746a2adbff1a38826..0000000000000000000000000000000000000000
--- a/CMake/cdat_modules/gsw_external.cmake
+++ /dev/null
@@ -1,24 +0,0 @@
-# gsw (Gibbs Seawater)
-#
-set(gsw_source_dir "${CMAKE_CURRENT_BINARY_DIR}/build/gsw")
-
-configure_file(
-  "${cdat_CMAKE_SOURCE_DIR}/cdat_modules_extra/gsw_build_step.cmake.in"
-  "${cdat_CMAKE_BINARY_DIR}/gsw_build_step.cmake"
-  @ONLY
-)
-
-set(gsw_build_command ${CMAKE_COMMAND} -P ${cdat_CMAKE_BINARY_DIR}/gsw_build_step.cmake)
-
-ExternalProject_Add(gsw
-  DOWNLOAD_DIR ${CDAT_PACKAGE_CACHE_DIR}
-  SOURCE_DIR ${gsw_source_dir}
-  URL ${GSW_URL}/${GSW_GZ}
-  URL_MD5 ${GSW_MD5}
-  BUILD_IN_SOURCE 1
-  CONFIGURE_COMMAND ""
-  BUILD_COMMAND ${gsw_build_command}
-  INSTALL_COMMAND ""
-  DEPENDS ${gsw_deps}
-  ${ep_log_options}
-)
diff --git a/CMake/cdat_modules/gsw_pkg.cmake b/CMake/cdat_modules/gsw_pkg.cmake
deleted file mode 100644
index 127a403c0df63f620b28aaf543533f152746e849..0000000000000000000000000000000000000000
--- a/CMake/cdat_modules/gsw_pkg.cmake
+++ /dev/null
@@ -1,18 +0,0 @@
-set( GSW_MAJOR 3 )
-set( GSW_MINOR 0 )
-set( GSW_PATCH 3 )
-set( GSW_VERSION ${GSW_MAJOR}.${GSW_MINOR}.${GSW_PATCH} )
-set( GSW_URL ${LLNL_URL} )
-set( GSW_GZ python-gsw-${GSW_VERSION}.tar.gz )
-set( GSW_MD5 a522a9ab6ab41fb70064e0378e904ffd )
-
-set (nm GSW)
-string(TOUPPER ${nm} uc_nm)
-set(${uc_nm}_VERSION ${${nm}_MAJOR}.${${nm}_MINOR}.${${nm}_PATCH})
-set(GSW_SOURCE ${GSW_URL}/${GSW_GZ})
-
-if (CDAT_BUILD_ALL)
-  add_cdat_package(gsw "" "" ON)
-else()
-  add_cdat_package(gsw "" "" OFF)
-endif()
diff --git a/CMake/cdat_modules/gui_support_deps.cmake b/CMake/cdat_modules/gui_support_deps.cmake
deleted file mode 100644
index 3c7bc73790c448713f053c2278ea185c69490edd..0000000000000000000000000000000000000000
--- a/CMake/cdat_modules/gui_support_deps.cmake
+++ /dev/null
@@ -1 +0,0 @@
-set(gui_support_deps ${cdat_pkg})
diff --git a/CMake/cdat_modules/gui_support_external.cmake b/CMake/cdat_modules/gui_support_external.cmake
deleted file mode 100644
index 5d10b82e7ba960411e9326931aa35108d063dea8..0000000000000000000000000000000000000000
--- a/CMake/cdat_modules/gui_support_external.cmake
+++ /dev/null
@@ -1,20 +0,0 @@
-set(gui_support_source_dir "${cdat_SOURCE_DIR}/Packages/gui_support")
-set(gui_support_binary_dir "${CMAKE_CURRENT_BINARY_DIR}/build/gui_support-build")
-set(runtime_library_path ${CMAKE_INSTALL_PREFIX}/lib:${cdat_EXTERNALS}/lib)
-
-#  BUILD_COMMAND env EXTERNALS=${cdat_EXTERNALS} ${LIBRARY_PATH}=${runtime_library_path} ${PYTHON_EXECUTABLE} setup.py build
-#  INSTALL_COMMAND env EXTERNALS=${cdat_EXTERNALS} ${LIBRARY_PATH}=${runtime_library_path} ${PYTHON_EXECUTABLE} setup.py install --prefix=${CMAKE_INSTALL_PREFIX}
-ExternalProject_Add(gui_support
-  DOWNLOAD_DIR ""
-  SOURCE_DIR ${gui_support_source_dir}
-  BINARY_DIR ${gui_support_binary_dir}
-  BUILD_IN_SOURCE 0
-  BUILD_COMMAND ""
-#  BUILD_COMMAND env PYTHONPATH=$ENV{PYTHONPATH} LD_LIBRARY_PATH=$ENV{LD_LIBRARY_PATH} EXTERNALS=${cdat_EXTERNALS}  ${PYTHON_EXECUTABLE} ${gui_support_source_dir}/setup.py build -b ${gui_support_binary_dir}
-  INSTALL_COMMAND env PYTHONPATH=$ENV{PYTHONPATH} LD_LIBRARY_PATH=$ENV{LD_LIBRARY_PATH} EXTERNALS=${cdat_EXTERNALS} ${PYTHON_EXECUTABLE} ${gui_support_source_dir}/setup.py build -b ${gui_support_binary_dir} install ${PYTHON_EXTRA_PREFIX}
-  PATCH_COMMAND ""
-  CONFIGURE_COMMAND ""
-  DEPENDS ${gui_support_deps}
-  ${ep_log_options}
-)
-
diff --git a/CMake/cdat_modules/gui_support_pkg.cmake b/CMake/cdat_modules/gui_support_pkg.cmake
deleted file mode 100644
index f4fcee7d674618c33c9609924d84c32c623b259a..0000000000000000000000000000000000000000
--- a/CMake/cdat_modules/gui_support_pkg.cmake
+++ /dev/null
@@ -1,5 +0,0 @@
-set(GUI_SUPPORT_SOURCE N/A)
-set(GUI_SUPPORT_VERSION N/A)
-set(GUI_SUPPORT_MD5 N/A)
-
-add_cdat_package_dependent(gui_support "" "" OFF "CDAT_BUILD_GUI" OFF)
diff --git a/CMake/cdat_modules/h5py_deps.cmake b/CMake/cdat_modules/h5py_deps.cmake
deleted file mode 100644
index f1ce2f917d29cf2c70cce92723a47b32a43211f7..0000000000000000000000000000000000000000
--- a/CMake/cdat_modules/h5py_deps.cmake
+++ /dev/null
@@ -1 +0,0 @@
-set(h5py_deps ${python_pkg} ${hdf5_pkg} ${numpy_pkg} ${cython_pkg})
diff --git a/CMake/cdat_modules/h5py_external.cmake b/CMake/cdat_modules/h5py_external.cmake
deleted file mode 100644
index 83762f1e608b81c66bc469908354dc3c41b96f88..0000000000000000000000000000000000000000
--- a/CMake/cdat_modules/h5py_external.cmake
+++ /dev/null
@@ -1,24 +0,0 @@
-# h5py
-#
-set(h5py_source_dir "${CMAKE_CURRENT_BINARY_DIR}/build/h5py")
-
-configure_file(
-  "${cdat_CMAKE_SOURCE_DIR}/cdat_modules_extra/h5py_build_step.cmake.in"
-  "${cdat_CMAKE_BINARY_DIR}/h5py_build_step.cmake"
-  @ONLY
-)
-
-set(h5py_build_command ${CMAKE_COMMAND} -P ${cdat_CMAKE_BINARY_DIR}/h5py_build_step.cmake)
-
-ExternalProject_Add(h5py
-  DOWNLOAD_DIR ${CDAT_PACKAGE_CACHE_DIR}
-  SOURCE_DIR ${h5py_source_dir}
-  URL ${H5PY_URL}/${H5PY_GZ}
-  URL_MD5 ${H5PY_MD5}
-  BUILD_IN_SOURCE 1
-  CONFIGURE_COMMAND ""
-  BUILD_COMMAND ${h5py_build_command}
-  INSTALL_COMMAND ""
-  DEPENDS ${h5py_deps}
-  ${ep_log_options}
-)
diff --git a/CMake/cdat_modules/h5py_pkg.cmake b/CMake/cdat_modules/h5py_pkg.cmake
deleted file mode 100644
index 3a753fc2b20d2a48a349c91431873ca4980f5098..0000000000000000000000000000000000000000
--- a/CMake/cdat_modules/h5py_pkg.cmake
+++ /dev/null
@@ -1,18 +0,0 @@
-set(H5PY_MAJOR 2)
-set(H5PY_MINOR 5)
-set(H5PY_PATCH 0)
-set(H5PY_VERSION ${H5PY_MAJOR}.${H5PY_MINOR}.${H5PY_PATCH})
-set(H5PY_URL ${LLNL_URL})
-set(H5PY_GZ h5py-${H5PY_VERSION}.tar.gz)
-set(H5PY_MD5 969c78e366e8e86dcd0376d945a72dd0)
-
-set (nm H5PY)
-string(TOUPPER ${nm} uc_nm)
-set(${uc_nm}_VERSION ${${nm}_MAJOR}.${${nm}_MINOR}.${${nm}_PATCH})
-set(H5PY_SOURCE ${H5PY_URL}/${H5PY_GZ})
-
-if (CDAT_BUILD_ALL)
-  add_cdat_package(h5py "" "" ON)
-else()
-  add_cdat_package(h5py "" "" OFF)
-endif()
diff --git a/CMake/cdat_modules/hdf4_external.cmake b/CMake/cdat_modules/hdf4_external.cmake
deleted file mode 100644
index 7b34bef0b7fddc231ec71f3043c2730b033d12a2..0000000000000000000000000000000000000000
--- a/CMake/cdat_modules/hdf4_external.cmake
+++ /dev/null
@@ -1,24 +0,0 @@
-
-set(HDF4_source "${CMAKE_CURRENT_BINARY_DIR}/HDF4")
-set(HDF4_install "${cdat_EXTERNALS}")
-
-if(NOT CMAKE_Fortran_COMPILER)
-  set(hdf4_configure_args --disable-fortran)
-else()
-  set(hdf4_configure_args --enable-fortran)
-endif()
-
-ExternalProject_Add(HDF4
-  DOWNLOAD_DIR ${CDAT_PACKAGE_CACHE_DIR}
-  SOURCE_DIR ${HDF4_source}
-  INSTALL_DIR ${HDF4_install}
-  URL ${HDF4_URL}/${HDF4_GZ}
-  URL_MD5 ${HDF4_MD5}
-  BUILD_IN_SOURCE 1
-  PATCH_COMMAND ""
-  CONFIGURE_COMMAND ${CMAKE_COMMAND} -DINSTALL_DIR=<INSTALL_DIR> -DWORKING_DIR=<SOURCE_DIR> -DCONFIGURE_ARGS=${hdf4_configure_args} -P ${cdat_CMAKE_BINARY_DIR}/cdat_configure_step.cmake
-  DEPENDS ${HDF4_deps}
-)
-
-set(HDF4_DIR "${HDF4_binary}" CACHE PATH "HDF4 binary directory" FORCE)
-mark_as_advanced(HDF4_DIR)
diff --git a/CMake/cdat_modules/hdf5_deps.cmake b/CMake/cdat_modules/hdf5_deps.cmake
deleted file mode 100644
index 45a66d741a6cdb629c28f3ec04403ac4eba68898..0000000000000000000000000000000000000000
--- a/CMake/cdat_modules/hdf5_deps.cmake
+++ /dev/null
@@ -1,4 +0,0 @@
-set(HDF5_deps ${pkgconfig_pkg} ${libxml2_pkg} ${libxslt_pkg} ${zlib_pkg})
-if (CDAT_BUILD_PARALLEL)
-  list(APPEND HDF5_deps ${mpi_pkg})
-endif()
diff --git a/CMake/cdat_modules/hdf5_external.cmake b/CMake/cdat_modules/hdf5_external.cmake
deleted file mode 100644
index 7f20675f7d4fc5835056e6cebbb6599fd6eb070b..0000000000000000000000000000000000000000
--- a/CMake/cdat_modules/hdf5_external.cmake
+++ /dev/null
@@ -1,40 +0,0 @@
-
-set(HDF5_source "${CMAKE_CURRENT_BINARY_DIR}/build/HDF5")
-set(HDF5_install "${cdat_EXTERNALS}")
-
-configure_file(${cdat_CMAKE_SOURCE_DIR}/cdat_modules_extra/hdf5_patch_step.cmake.in
-  ${cdat_CMAKE_BINARY_DIR}/hdf5_patch_step.cmake
-  @ONLY)
-if (CDAT_BUILD_PARALLEL)
-  set(hdf5_configure_args "--enable-parallel")
-  set(hdf5_additional_cflags "-w -fPIC")
-  set(configure_file "cdatmpi_configure_step.cmake")
-else()
-  set(hdf5_configure_args "")
-  set(hdf5_additional_cflags "-w")
-  set(configure_file "cdat_configure_step.cmake")
-endif()
-# We disable HDF5 warnings because it emits far too many of them.
-ExternalProject_Add(HDF5
-  DOWNLOAD_DIR ${CDAT_PACKAGE_CACHE_DIR}
-  SOURCE_DIR ${HDF5_source}
-  INSTALL_DIR ${HDF5_install}
-  URL ${HDF5_URL}/${HDF5_GZ}
-  URL_MD5 ${HDF5_MD5}
-  BUILD_IN_SOURCE 1
-  #PATCH_COMMAND ${CMAKE_COMMAND} -DWORKING_DIR=<SOURCE_DIR> -P ${cdat_CMAKE_BINARY_DIR}/hdf5_patch_step.cmake
-  CONFIGURE_COMMAND ${CMAKE_COMMAND} -DCONFIGURE_ARGS=${hdf5_configure_args} -DADDITIONAL_CFLAGS=${hdf5_additional_cflags} -DADDITIONAL_CPPFPAGS=-w -DINSTALL_DIR=<INSTALL_DIR> -DWORKING_DIR=<SOURCE_DIR> -P ${cdat_CMAKE_BINARY_DIR}/${configure_file}
-  BUILD_COMMAND ${CMAKE_COMMAND} -Dmake=$(MAKE) -DWORKING_DIR=<SOURCE_DIR> -P ${cdat_CMAKE_BINARY_DIR}/cdat_make_step.cmake
-  INSTALL_COMMAND ${CMAKE_COMMAND} -DWORKING_DIR=<SOURCE_DIR> -P ${cdat_CMAKE_BINARY_DIR}/cdat_install_step.cmake
-  LOG_BUILD 1
-  DEPENDS ${HDF5_deps}
-  ${ep_log_options}
-)
-
-if(WIN32)
-  set(HDF5_INCLUDE_DIR ${HDF5_install}/include)
-  set(HDF5_LIBRARY ${HDF5_install}/lib/hdf5dll${_LINK_LIBRARY_SUFFIX})
-else()
-  set(HDF5_INCLUDE_DIR ${HDF5_install}/include)
-  set(HDF5_LIBRARY ${HDF5_install}/lib/libhdf5${_LINK_LIBRARY_SUFFIX})
-endif()
diff --git a/CMake/cdat_modules/hdf5_pkg.cmake b/CMake/cdat_modules/hdf5_pkg.cmake
deleted file mode 100644
index 4599c9c95bb4d0ebed0de422f42904b783dac1a5..0000000000000000000000000000000000000000
--- a/CMake/cdat_modules/hdf5_pkg.cmake
+++ /dev/null
@@ -1,13 +0,0 @@
-set(HDF5_MAJOR_SRC 1)
-set(HDF5_MINOR_SRC 8)
-set(HDF5_PATCH_SRC 15)
-set(HDF5_URL ${LLNL_URL})
-set(HDF5_GZ hdf5-${HDF5_MAJOR_SRC}.${HDF5_MINOR_SRC}.${HDF5_PATCH_SRC}.tar.gz)
-set(HDF5_MD5 03cccb5b33dbe975fdcd8ae9dc021f24 )
-
-set (nm HDF5)
-string(TOUPPER ${nm} uc_nm)
-set(${uc_nm}_VERSION ${${nm}_MAJOR_SRC}.${${nm}_MINOR_SRC}.${${nm}_PATCH_SRC})
-set(HDF5_SOURCE ${HDF5_URL}/${HDF5_GZ})
-
-add_cdat_package(HDF5 "" "" ON)
diff --git a/CMake/cdat_modules/idna_deps.cmake b/CMake/cdat_modules/idna_deps.cmake
deleted file mode 100644
index e2aa851a86d378cc49d03340949f6a860017e6d8..0000000000000000000000000000000000000000
--- a/CMake/cdat_modules/idna_deps.cmake
+++ /dev/null
@@ -1 +0,0 @@
-set(IDNA_deps ${python_pkg} ${setuptools_pkg})
diff --git a/CMake/cdat_modules/idna_external.cmake b/CMake/cdat_modules/idna_external.cmake
deleted file mode 100644
index a987e968f52f07d8039e8d31838dcf7f83062b13..0000000000000000000000000000000000000000
--- a/CMake/cdat_modules/idna_external.cmake
+++ /dev/null
@@ -1,6 +0,0 @@
-
-# create an external project to install IDNA,
-# and configure and build it
-set(nm IDNA)
-
-include(pipinstaller)
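All of the pip-installed packages (idna, ipaddress, ipython, jinja2, lepl, ...) reduce to setting nm and including pipinstaller. pipinstaller.cmake is not in this diff; by convention it evidently keys off ${nm} plus the ${uc_nm}_SOURCE / ${uc_nm}_MD5 / ${uc_nm}_deps variables defined in the matching *_pkg.cmake. A hedged sketch of what such an include could amount to, not the project's actual file:

    # pipinstaller sketch -- hypothetical
    string(TOUPPER ${nm} uc_nm)
    ExternalProject_Add(${nm}
      DOWNLOAD_DIR ${CDAT_PACKAGE_CACHE_DIR}
      URL ${${uc_nm}_SOURCE}
      URL_MD5 ${${uc_nm}_MD5}
      BUILD_IN_SOURCE 1
      CONFIGURE_COMMAND ""
      BUILD_COMMAND ""
      INSTALL_COMMAND ${PYTHON_EXECUTABLE} -m pip install --no-deps .
      DEPENDS ${${uc_nm}_deps}
    )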
diff --git a/CMake/cdat_modules/idna_pkg.cmake b/CMake/cdat_modules/idna_pkg.cmake
deleted file mode 100644
index 5bf85392916e411ec5d71ccfd7f51df30b01faff..0000000000000000000000000000000000000000
--- a/CMake/cdat_modules/idna_pkg.cmake
+++ /dev/null
@@ -1,10 +0,0 @@
-set(IDNA_MAJOR_SRC 2)
-set(IDNA_MINOR_SRC 0)
-set(IDNA_PATCH_SRC 0)
-
-set(IDNA_VERSION ${IDNA_MAJOR_SRC}.${IDNA_MINOR_SRC})
-set(IDNA_GZ idna-${IDNA_VERSION}.tar.gz)
-set(IDNA_SOURCE ${LLNL_URL}/${IDNA_GZ})
-set(IDNA_MD5 9ef51e6e51ea91b6c62426856c8a5b7c)
-
-add_cdat_package_dependent(IDNA "" "" OFF "CDAT_BUILD_LEAN" OFF)
diff --git a/CMake/cdat_modules/ipaddress_deps.cmake b/CMake/cdat_modules/ipaddress_deps.cmake
deleted file mode 100644
index ca515655fee0c8374583218cb8c7e7b60c9d757d..0000000000000000000000000000000000000000
--- a/CMake/cdat_modules/ipaddress_deps.cmake
+++ /dev/null
@@ -1 +0,0 @@
-set(IPADDRESS_deps ${pip_pkg} ${python_pkg} ${setuptools_pkg})
diff --git a/CMake/cdat_modules/ipaddress_external.cmake b/CMake/cdat_modules/ipaddress_external.cmake
deleted file mode 100644
index 4773cea4c647d663fc29a353e2504d338a3e83b9..0000000000000000000000000000000000000000
--- a/CMake/cdat_modules/ipaddress_external.cmake
+++ /dev/null
@@ -1,6 +0,0 @@
-
-# create an external project to install IPADDRESS,
-# and configure and build it
-set(nm IPADDRESS)
-
-include(pipinstaller)
diff --git a/CMake/cdat_modules/ipaddress_pkg.cmake b/CMake/cdat_modules/ipaddress_pkg.cmake
deleted file mode 100644
index 68ce4f62931f9d4e8cee9d1f60c5a0494a63b9e0..0000000000000000000000000000000000000000
--- a/CMake/cdat_modules/ipaddress_pkg.cmake
+++ /dev/null
@@ -1,10 +0,0 @@
-set(IPADDRESS_MAJOR_SRC 1)
-set(IPADDRESS_MINOR_SRC 0)
-set(IPADDRESS_PATCH_SRC 16)
-
-set(IPADDRESS_VERSION ${IPADDRESS_MAJOR_SRC}.${IPADDRESS_MINOR_SRC}.${IPADDRESS_PATCH_SRC})
-set(IPADDRESS_GZ ipaddress-${IPADDRESS_VERSION}.tar.gz)
-set(IPADDRESS_SOURCE ${LLNL_URL}/${IPADDRESS_GZ})
-set(IPADDRESS_MD5 1e27b62aa20f5b6fc200b2bdbf0d0847)
-
-add_cdat_package_dependent(IPADDRESS "" "" OFF "CDAT_BUILD_LEAN" OFF)
diff --git a/CMake/cdat_modules/ipython_deps.cmake b/CMake/cdat_modules/ipython_deps.cmake
deleted file mode 100644
index 0828bdbd98f65163dbfb7d7daa09f61e4d95d9b2..0000000000000000000000000000000000000000
--- a/CMake/cdat_modules/ipython_deps.cmake
+++ /dev/null
@@ -1,5 +0,0 @@
-if (CDAT_BUILD_ALL)
-  set(IPYTHON_deps ${pip_pkg} ${tornado_pkg} ${numpy_pkg} ${numexpr_pkg})
-else ()
-  set(IPYTHON_deps ${pip_pkg} ${numpy_pkg})
-endif()
diff --git a/CMake/cdat_modules/ipython_external.cmake b/CMake/cdat_modules/ipython_external.cmake
deleted file mode 100644
index eab083a8ec97596575c80a9bcf10bb98bf57396e..0000000000000000000000000000000000000000
--- a/CMake/cdat_modules/ipython_external.cmake
+++ /dev/null
@@ -1,7 +0,0 @@
-
-# create an external project to install IPYTHON,
-# and configure and build it
-set(nm IPYTHON)
-set(OLD OFF)
-include(pipinstaller)
-unset(OLD)
diff --git a/CMake/cdat_modules/ipython_pkg.cmake b/CMake/cdat_modules/ipython_pkg.cmake
deleted file mode 100644
index ce9193f5c04f77e2b60a11e62af536e17e885c71..0000000000000000000000000000000000000000
--- a/CMake/cdat_modules/ipython_pkg.cmake
+++ /dev/null
@@ -1,10 +0,0 @@
-set(IPYTHON_MAJOR 3)
-set(IPYTHON_MINOR 0)
-set(IPYTHON_PATCH 0)
-set(IPYTHON_VERSION ${IPYTHON_MAJOR}.${IPYTHON_MINOR}.${IPYTHON_PATCH})
-set(IPYTHON_URL ${LLNL_URL})
-set(IPYTHON_GZ ipython-${IPYTHON_VERSION}.tar.gz)
-set(IPYTHON_MD5 b3f00f3c0be036fafef3b0b9d663f27e)
-set(IPYTHON_SOURCE ${IPYTHON_URL}/${IPYTHON_GZ})
-
-add_cdat_package(IPYTHON "" "" ON)
diff --git a/CMake/cdat_modules/jasper_deps.cmake b/CMake/cdat_modules/jasper_deps.cmake
deleted file mode 100644
index 4e51869526272ee54fce392b10ead2b263d1f75f..0000000000000000000000000000000000000000
--- a/CMake/cdat_modules/jasper_deps.cmake
+++ /dev/null
@@ -1 +0,0 @@
-set(jasper_deps ${pkgconfig_pkg} ${jpeg_pkg} ${zlib_pkg})
diff --git a/CMake/cdat_modules/jasper_external.cmake b/CMake/cdat_modules/jasper_external.cmake
deleted file mode 100644
index 81c9f5f8d5f5b2044f5c967e4319a07e33734645..0000000000000000000000000000000000000000
--- a/CMake/cdat_modules/jasper_external.cmake
+++ /dev/null
@@ -1,20 +0,0 @@
-
-set(jasper_source "${CMAKE_CURRENT_BINARY_DIR}/build/jasper")
-set(jasper_install "${cdat_EXTERNALS}")
-
-configure_file(${cdat_CMAKE_SOURCE_DIR}/cdat_modules_extra/jasper_configure_step.cmake.in
-    ${cdat_CMAKE_BINARY_DIR}/jasper_configure_step.cmake
-    @ONLY)
-
-ExternalProject_Add(jasper
-  DOWNLOAD_DIR ${CDAT_PACKAGE_CACHE_DIR}
-  SOURCE_DIR ${jasper_source}
-  INSTALL_DIR ${jasper_install}
-  URL ${JASPER_URL}/${JASPER_GZ}
-  URL_MD5 ${JASPER_MD5}
-  BUILD_IN_SOURCE 1
-  PATCH_COMMAND ""
-  CONFIGURE_COMMAND ${CMAKE_COMMAND} -DINSTALL_DIR=<INSTALL_DIR> -DWORKING_DIR=<SOURCE_DIR> -P ${cdat_CMAKE_BINARY_DIR}/jasper_configure_step.cmake
-  DEPENDS ${jasper_deps}
-  ${ep_log_options}
-)
diff --git a/CMake/cdat_modules/jasper_pkg.cmake b/CMake/cdat_modules/jasper_pkg.cmake
deleted file mode 100644
index a4f8987232c839161c439e7fa7e188f59015b7db..0000000000000000000000000000000000000000
--- a/CMake/cdat_modules/jasper_pkg.cmake
+++ /dev/null
@@ -1,10 +0,0 @@
-set(JASPER_MAJOR 1)
-set(JASPER_MINOR 900)
-set(JASPER_PATCH 1)
-set(JASPER_VERSION ${JASPER_MAJOR}.${JASPER_MINOR}.${JASPER_PATCH})
-set(JASPER_URL ${LLNL_URL})
-set(JASPER_GZ jasper-${JASPER_VERSION}.tgz)
-set(JASPER_MD5 b5ae85050d034555790a3ccbc2522860)
-set(JASPER_SOURCE ${JASPER_URL}/${JASPER_GZ})
-
-add_cdat_package(jasper "" "" ON)
diff --git a/CMake/cdat_modules/jinja2_deps.cmake b/CMake/cdat_modules/jinja2_deps.cmake
deleted file mode 100644
index a8047b98ab608f03688ba42960d6826aa99e7147..0000000000000000000000000000000000000000
--- a/CMake/cdat_modules/jinja2_deps.cmake
+++ /dev/null
@@ -1 +0,0 @@
-set(jinja2_deps ${pip_pkg} ${markupsafe_pkg})
diff --git a/CMake/cdat_modules/jinja2_external.cmake b/CMake/cdat_modules/jinja2_external.cmake
deleted file mode 100644
index a50b6c79fb085e8fa575658be2c3493e72b7cefe..0000000000000000000000000000000000000000
--- a/CMake/cdat_modules/jinja2_external.cmake
+++ /dev/null
@@ -1,6 +0,0 @@
-
-# create an external project to install jinja2,
-# and configure and build it
-set(nm jinja2)
-
-include(pipinstaller)
diff --git a/CMake/cdat_modules/jinja2_pkg.cmake b/CMake/cdat_modules/jinja2_pkg.cmake
deleted file mode 100644
index ffabe31e3dbabcc39ed9167c096ddb5a5acfaed4..0000000000000000000000000000000000000000
--- a/CMake/cdat_modules/jinja2_pkg.cmake
+++ /dev/null
@@ -1,9 +0,0 @@
-set(jinja2_MAJOR_SRC 2)
-set(jinja2_MINOR_SRC 7)
-set(jinja2_PATCH_SRC )
-set(JINJA2_VERSION ${jinja2_MAJOR_SRC}.${jinja2_MINOR_SRC})
-set(JINJA2_GZ Jinja2-${JINJA2_VERSION}.tar.gz)
-set(JINJA2_SOURCE ${LLNL_URL}/${JINJA2_GZ})
-set(JINJA2_MD5 c2fb12cbbb523c57d3d15bfe4dc0e8fe )
-
-add_cdat_package_dependent(jinja2 "" "" OFF "CDAT_BUILD_GUI" OFF)
diff --git a/CMake/cdat_modules/jpeg_deps.cmake b/CMake/cdat_modules/jpeg_deps.cmake
deleted file mode 100644
index e7e6b16ba9b67ee96608ae7f5d95269f251df29b..0000000000000000000000000000000000000000
--- a/CMake/cdat_modules/jpeg_deps.cmake
+++ /dev/null
@@ -1 +0,0 @@
-set(jpeg_deps ${pkgconfig_pkg})
diff --git a/CMake/cdat_modules/jpeg_external.cmake b/CMake/cdat_modules/jpeg_external.cmake
deleted file mode 100644
index e5a6f62bffb8024bc835fe9a10ed57e1ba6aff75..0000000000000000000000000000000000000000
--- a/CMake/cdat_modules/jpeg_external.cmake
+++ /dev/null
@@ -1,24 +0,0 @@
-
-set(jpeg_source "${CMAKE_CURRENT_BINARY_DIR}/build/jpeg")
-set(jpeg_install "${cdat_EXTERNALS}")
-
-configure_file(${cdat_CMAKE_SOURCE_DIR}/cdat_modules_extra/jpeg_install_step.cmake.in
-    ${cdat_CMAKE_BINARY_DIR}/jpeg_install_step.cmake
-    @ONLY)
-
-set(jpeg_INSTALL_COMMAND ${CMAKE_COMMAND} -P ${cdat_CMAKE_BINARY_DIR}/jpeg_install_step.cmake)
-
-ExternalProject_Add(jpeg
-  DOWNLOAD_DIR ${CDAT_PACKAGE_CACHE_DIR}
-  SOURCE_DIR ${jpeg_source}
-  INSTALL_DIR ${jpeg_install}
-  URL ${JPEG_URL}/${JPEG_GZ}
-  URL_MD5 ${JPEG_MD5}
-  BUILD_IN_SOURCE 1
-  PATCH_COMMAND ""
-  CONFIGURE_COMMAND ${CMAKE_COMMAND} -DINSTALL_DIR=<INSTALL_DIR> -DWORKING_DIR=<SOURCE_DIR> -P ${cdat_CMAKE_BINARY_DIR}/cdat_configure_step.cmake
-  INSTALL_COMMAND ${jpeg_INSTALL_COMMAND}
-  DEPENDS ${jpeg_deps}
-  ${ep_log_options}
-)
-
diff --git a/CMake/cdat_modules/jpeg_pkg.cmake b/CMake/cdat_modules/jpeg_pkg.cmake
deleted file mode 100644
index c30e433f7ff975548590018d02f5d7bc50083439..0000000000000000000000000000000000000000
--- a/CMake/cdat_modules/jpeg_pkg.cmake
+++ /dev/null
@@ -1,12 +0,0 @@
-set(JPEG_URL ${LLNL_URL})
-set(JPEG_GZ jpegsrc.v8c.tar.gz)
-set(JPEG_MD5 a2c10c04f396a9ce72894beb18b4e1f9)
-
-set(JPEG_VERSION v8c)
-set(JPEG_SOURCE ${JPEG_URL}/${JPEG_GZ})
-
-# grib2/jasper need this, so cdms2 cannot turn it off
-#if (CDAT_BUILD_GRAPHICS)
-add_cdat_package(jpeg "" "" OFF)
-#endif()
-
diff --git a/CMake/cdat_modules/lapack_deps.cmake b/CMake/cdat_modules/lapack_deps.cmake
deleted file mode 100644
index cc817469990323901b316db17ec5eaf800878537..0000000000000000000000000000000000000000
--- a/CMake/cdat_modules/lapack_deps.cmake
+++ /dev/null
@@ -1 +0,0 @@
-set(LAPACK_deps ${pkgconfig_pkg})
diff --git a/CMake/cdat_modules/lapack_external.cmake b/CMake/cdat_modules/lapack_external.cmake
deleted file mode 100644
index a8b3f6139e4ddde31453990eecb48e8cfb935f48..0000000000000000000000000000000000000000
--- a/CMake/cdat_modules/lapack_external.cmake
+++ /dev/null
@@ -1,26 +0,0 @@
-# The LAPACK external project
-
-set(lapack_source "${CMAKE_CURRENT_BINARY_DIR}/build/LAPACK")
-set(lapack_binary "${CMAKE_CURRENT_BINARY_DIR}/build/LAPACK-build")
-set(lapack_install "${cdat_EXTERNALS}")
-set(NUMPY_LAPACK_binary ${lapack_binary})
-
-ExternalProject_Add(LAPACK
-  DOWNLOAD_DIR ${CDAT_PACKAGE_CACHE_DIR}
-  SOURCE_DIR ${lapack_source}
-  BINARY_DIR ${lapack_binary}
-  INSTALL_DIR ${lapack_install}
-  URL ${LAPACK_URL}/${LAPACK_GZ}
-  URL_MD5 ${LAPACK_MD5}
-  CMAKE_ARGS
-    -DCMAKE_Fortran_COMPILER:FILEPATH=${CMAKE_Fortran_COMPILER}
-    -DCMAKE_CXX_FLAGS:STRING=${cdat_tpl_cxx_flags}
-    -DCMAKE_C_FLAGS:STRING=${cdat_tpl_c_flags}
-    -DBUILD_SHARED_LIBS:BOOL=ON
-    -DENABLE_TESTING:BOOL=OFF
-    -DCMAKE_BUILD_TYPE:STRING=${CMAKE_BUILD_TYPE}
-  CMAKE_ARGS
-    -DCMAKE_INSTALL_PREFIX:PATH=<INSTALL_DIR>
-  DEPENDS ${LAPACK_deps}
-  ${ep_log_options}
-)
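The repeated CMAKE_ARGS keyword above is legal: ExternalProject_Add's argument parser appends values whenever a known keyword reappears, so the two blocks merge into a single list. Equivalent and arguably clearer as one block:

    CMAKE_ARGS
      -DCMAKE_Fortran_COMPILER:FILEPATH=${CMAKE_Fortran_COMPILER}
      -DCMAKE_CXX_FLAGS:STRING=${cdat_tpl_cxx_flags}
      -DCMAKE_C_FLAGS:STRING=${cdat_tpl_c_flags}
      -DBUILD_SHARED_LIBS:BOOL=ON
      -DENABLE_TESTING:BOOL=OFF
      -DCMAKE_BUILD_TYPE:STRING=${CMAKE_BUILD_TYPE}
      -DCMAKE_INSTALL_PREFIX:PATH=<INSTALL_DIR>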
diff --git a/CMake/cdat_modules/lapack_pkg.cmake b/CMake/cdat_modules/lapack_pkg.cmake
deleted file mode 100644
index 3f5b9b81e8bbe92bd158d8134e90e59b49ff273f..0000000000000000000000000000000000000000
--- a/CMake/cdat_modules/lapack_pkg.cmake
+++ /dev/null
@@ -1,20 +0,0 @@
-set(LAPACK_MAJOR_SRC 3)
-set(LAPACK_MINOR_SRC 4)
-set(LAPACK_PATCH_SRC 2)
-
-set(LAPACK_URL ${LLNL_URL})
-set(LAPACK_GZ lapack-${LAPACK_MAJOR_SRC}.${LAPACK_MINOR_SRC}.${LAPACK_PATCH_SRC}.tgz)
-set(LAPACK_MD5 61bf1a8a4469d4bdb7604f5897179478 )
-
-set (nm LAPACK)
-string(TOUPPER ${nm} uc_nm)
-set(${uc_nm}_VERSION ${${nm}_MAJOR_SRC}.${${nm}_MINOR_SRC}.${${nm}_PATCH_SRC})
-
-# Removing the Apple exclusion for now
-set(LAPACK_SOURCE ${LAPACK_URL}/${LAPACK_GZ})
-
-if(NOT APPLE)
-  if(CMAKE_Fortran_COMPILER)
-    add_cdat_package(LAPACK "" "" OFF)
-  endif()
-endif()
diff --git a/CMake/cdat_modules/lats_deps.cmake b/CMake/cdat_modules/lats_deps.cmake
deleted file mode 100644
index 4f7aee7aa54c715a774064dd4c230fc79e81a40b..0000000000000000000000000000000000000000
--- a/CMake/cdat_modules/lats_deps.cmake
+++ /dev/null
@@ -1 +0,0 @@
-set(lats_deps ${netcdf_pkg})
diff --git a/CMake/cdat_modules/lats_external.cmake b/CMake/cdat_modules/lats_external.cmake
deleted file mode 100644
index 519fc3a2e6cfd2159d92720e1788db921a131695..0000000000000000000000000000000000000000
--- a/CMake/cdat_modules/lats_external.cmake
+++ /dev/null
@@ -1,44 +0,0 @@
-
-set(lats_source "${CMAKE_CURRENT_BINARY_DIR}/build/lats")
-set(lats_install "${cdat_EXTERNALS}")
-
-configure_file(
-    ${cdat_CMAKE_SOURCE_DIR}/cdat_modules_extra/lats_Makefile.gfortran.in
-    ${CMAKE_CURRENT_BINARY_DIR}/CMake/lats_Makefile
-    )
-
-if(DEFINED GIT_CMD_STR_LATS )
-    message("[INFO] [lats] Installing ${nm} from ${GIT_CMD_STR_LATS}")
-    include(GetGitRevisionDescription)
-    set(URL_STR )
-    set(URL_MD5_STR )
-else()
-    message("[INFO] [lats] Installed ${nm} from tarball ${LATS_GZ}")
-    set(URL_STR URL ${LATS_URL}/${LATS_GZ})
-    set(URL_MD5_STR URL_MD5 ${LATS_MD5})
-    set(GIT_CMD_STR_LATS )
-    set(GIT_TAG )
-endif()
-set(LATS_MAKE_ARGS -f^^${CMAKE_CURRENT_BINARY_DIR}/CMake/lats_Makefile)
-set(LATS_MAKE_INSTALL_ARGS -f^^${CMAKE_CURRENT_BINARY_DIR}/CMake/lats_Makefile^^install)
-set(LATS_BUILD_ARGS -fPIC)
-
-ExternalProject_Add(lats
-  LIST_SEPARATOR ^^
-  DOWNLOAD_DIR ${CDAT_PACKAGE_CACHE_DIR}
-  SOURCE_DIR ${lats_source}
-  INSTALL_DIR ${lats_install}
-  ${URL_STR}
-  ${URL_MD5_STR}
-  ${GIT_CMD_STR_LATS}
-  ${GIT_TAG}
-  BUILD_IN_SOURCE 1
-  CONFIGURE_COMMAND ""
-  BUILD_COMMAND ${CMAKE_COMMAND} -DADDITIONAL_CFLAGS=${LATS_BUILD_ARGS} -Dmake=$(MAKE) -DBUILD_ARGS=${LATS_MAKE_ARGS} -DWORKING_DIR=<SOURCE_DIR> -P ${cdat_CMAKE_BINARY_DIR}/cdat_make_step.cmake
-  INSTALL_COMMAND ${CMAKE_COMMAND} -DADDITIONAL_CFLAGS=${LATS_BUILD_ARGS} -Dmake=$(MAKE) -DBUILD_ARGS=${LATS_MAKE_INSTALL_ARGS} -DWORKING_DIR=<SOURCE_DIR> -P ${cdat_CMAKE_BINARY_DIR}/cdat_make_step.cmake
-  DEPENDS ${lats_deps}
-  ${ep_log_options}
-)
-if (DEFINED GIT_CMD_STR_LATS)
-  unset(GIT_CMD_STR_LATS)
-endif()
diff --git a/CMake/cdat_modules/lats_pkg.cmake b/CMake/cdat_modules/lats_pkg.cmake
deleted file mode 100644
index 545c0fe099f222607412a223d6cb9ae49f8e9a53..0000000000000000000000000000000000000000
--- a/CMake/cdat_modules/lats_pkg.cmake
+++ /dev/null
@@ -1,10 +0,0 @@
-set(LATS_VERSION 1.0.0)
-set(LATS_BRANCH master)
-set(LATS_REPOSITORY ${GIT_PROTOCOL}github.com/UV-CDAT/lats.git )
-
-set(GIT_CMD_STR_LATS GIT_REPOSITORY ${LATS_REPOSITORY})
-set(GIT_TAG GIT_TAG "${LATS_BRANCH}")
-
-if (CDAT_BUILD_PCMDI)
-  add_cdat_package(lats "" "" ON)
-endif()
diff --git a/CMake/cdat_modules/lepl_deps.cmake b/CMake/cdat_modules/lepl_deps.cmake
deleted file mode 100644
index 0643a854046f5d01c172ede55ac4f4d554d87330..0000000000000000000000000000000000000000
--- a/CMake/cdat_modules/lepl_deps.cmake
+++ /dev/null
@@ -1 +0,0 @@
-set(lepl_deps ${cdat_pkg} ${numexpr_pkg} ${pip_pkg})
diff --git a/CMake/cdat_modules/lepl_external.cmake b/CMake/cdat_modules/lepl_external.cmake
deleted file mode 100644
index 80b680e29fc20a409bcc7b08772afa663f993b9b..0000000000000000000000000000000000000000
--- a/CMake/cdat_modules/lepl_external.cmake
+++ /dev/null
@@ -1,5 +0,0 @@
-# create an external project to install lepl,
-# and configure and build it
-set(nm lepl)
-
-include(pipinstaller)
diff --git a/CMake/cdat_modules/lepl_pkg.cmake b/CMake/cdat_modules/lepl_pkg.cmake
deleted file mode 100644
index 9551ef522f1ed40fc52ad78a798d4085dc97213b..0000000000000000000000000000000000000000
--- a/CMake/cdat_modules/lepl_pkg.cmake
+++ /dev/null
@@ -1,12 +0,0 @@
-set(lepl_MAJOR_SRC 5)
-set(lepl_MINOR_SRC 1)
-set(lepl_PATCH_SRC 3)
-
-set (nm lepl)
-string(TOUPPER ${nm} uc_nm)
-set(${uc_nm}_VERSION ${${nm}_MAJOR_SRC}.${${nm}_MINOR_SRC}.${${nm}_PATCH_SRC})
-set(LEPL_GZ LEPL-${LEPL_VERSION}.tar.gz)
-set(LEPL_SOURCE ${LLNL_URL}/${LEPL_GZ})
-set(LEPL_MD5 5f653984c57ad8efad828c5153660743 )
-
-add_cdat_package_dependent(lepl "" "" OFF "CDAT_BUILD_GUI" OFF)
diff --git a/CMake/cdat_modules/libcdms_deps.cmake b/CMake/cdat_modules/libcdms_deps.cmake
deleted file mode 100644
index 904227d3b55577e4811aabe36c7f33058eb2d423..0000000000000000000000000000000000000000
--- a/CMake/cdat_modules/libcdms_deps.cmake
+++ /dev/null
@@ -1,7 +0,0 @@
-set(libcdms_deps ${netcdf_pkg}
-              ${jasper_pkg} ${g2clib_pkg} ${tiff_pkg}
-              ${png_pkg} ${jpeg_pkg} )
-if (CDAT_BUILD_LIBDRS)
-    message("[INFO] ADDING LIBDRS TO LIBCDMS DEPNDENCIES")
-    LIST(APPEND libcdms_deps ${libdrs_pkg})
-endif()
diff --git a/CMake/cdat_modules/libcdms_external.cmake b/CMake/cdat_modules/libcdms_external.cmake
deleted file mode 100644
index 2ed64475c83ce3253908013beba143be07367c9f..0000000000000000000000000000000000000000
--- a/CMake/cdat_modules/libcdms_external.cmake
+++ /dev/null
@@ -1,56 +0,0 @@
-set(libcdms_source "${CMAKE_CURRENT_BINARY_DIR}/build/libcdms")
-set(libcdms_install "${cdat_EXTERNALS}")
-
-if(APPLE)
-    set(WITHPNGLIB "/usr/X11R6/lib")
-else()
-    set(WITHPNGLIB "no")
-endif()
-
-if (CDAT_BUILD_LIBDRS)
-    message("[INFO] ENBLING DRS IN CDMS")
-    set(drs_opt --enable-drs^^--with-drslib=${cdat_EXTERNALS}/lib^^--with-drsinc=${cdat_EXTERNALS}/include^^--with-drsincf=${cdat_EXTERNALS}/include)
-else()
-    set(drs_opt "")
-endif()
-
-set(CONFIGURE_ARGS --srcdir=${libcdms_source}^^--enable-dap^^${drs_opt}^^--enable-hdf=no^^--enable-pp=yes^^--enable-ql=no^^--cache-file=/dev/null^^--prefix=${libcdms_install}^^--with-nclib=${cdat_EXTERNALS}/lib^^--with-ncinc=${cdat_EXTERNALS}/include^^--with-daplib=/lib^^--with-dapinc=/include^^--with-hdfinc=./include^^--with-hdflib=./lib^^--with-hdf5lib=${cdat_EXTERNALS}/lib^^--with-pnglib=${WITHPNGLIB}^^--with-grib2lib=${cdat_EXTERNALS}/lib^^--with-jasperlib=${cdat_EXTERNALS}/lib^^--with-grib2inc=${cdat_EXTERNALS}/include^^--enable-grib2)
-file(MAKE_DIRECTORY ${cdat_EXTERNALS}/man/man3)
-
-
-if(DEFINED GIT_CMD_STR_LIBCDMS )
-    message("[INFO] [libcdms] Installing ${nm} from ${GIT_CMD_STR_LIBCDMS}")
-    include(GetGitRevisionDescription)
-    set(URL_STR )
-    set(URL_MD5_STR )
-else()
-    message("[INFO] [libcdms] Installed ${nm} from tarball ${LIBCDMS_GZ}")
-    set(URL_STR URL ${LIBCDMS_URL}/${LIBCDMS_GZ})
-    set(URL_MD5_STR URL_MD5 ${LIBCDMS_MD5})
-    set(GIT_CMD_STR_LIBCDMS )
-    set(GIT_TAG_LIBCDMS )
-endif()
-set(LIBCDMS_MAKE_ARGS -j1)
-set(LIBCDMS_BUILD_ARGS -fPIC)
-ExternalProject_Add(libcdms
-  LIST_SEPARATOR ^^
-  DOWNLOAD_DIR ${CDAT_PACKAGE_CACHE_DIR}
-  SOURCE_DIR ${libcdms_source}
-  INSTALL_DIR ${libcdms_install}
-  ${URL_STR}
-  ${URL_MD5_STR}
-  ${GIT_CMD_STR_LIBCDMS}
-  ${GIT_TAG_LIBCDMS}
-  PATCH_COMMAND ${CMAKE_COMMAND} -E remove <SOURCE_DIR>/zconf.h
-  BUILD_IN_SOURCE 1
-  CONFIGURE_COMMAND ${CMAKE_COMMAND} -DADDITIONAL_CFLAGS=${LIBCDMS_BUILD_ARGS}  -DCONFIGURE_ARGS=${CONFIGURE_ARGS} -DINSTALL_DIR=<INSTALL_DIR> -DWORKING_DIR=<SOURCE_DIR> -P ${cdat_CMAKE_BINARY_DIR}/cdat_configure_step.cmake
-  BUILD_COMMAND ${CMAKE_COMMAND} -DADDITIONAL_CFLAGS=${LIBCDMS_BUILD_ARGS} -Dmake=$(MAKE) -DBUILD_ARGS=${LIBCDMS_MAKE_ARGS} -DWORKING_DIR=<SOURCE_DIR> -P ${cdat_CMAKE_BINARY_DIR}/cdat_make_step.cmake
-  DEPENDS ${libcdms_deps}
-  ${ep_log_options}
-)
-if (DEFINED GIT_CMD_STR)
-  unset(GIT_CMD_STR)
-endif()
-if (DEFINED GIT_CMD_STR_LIBCDMS)
-  unset(GIT_CMD_STR_LIBCDMS)
-endif()
diff --git a/CMake/cdat_modules/libcdms_pkg.cmake b/CMake/cdat_modules/libcdms_pkg.cmake
deleted file mode 100644
index eb7a722eba41ed7f7ee98b61ed5c15f6fe383e69..0000000000000000000000000000000000000000
--- a/CMake/cdat_modules/libcdms_pkg.cmake
+++ /dev/null
@@ -1,12 +0,0 @@
-set(LIBCDMS_VERSION 1.0.0)
-set(LIBCDMS_URL ${LLNL_URL})
-set(LIBCDMS_GZ libcdms-${LIBCDMS_VERSION}.tar.gz)
-set(LIBCDMS_SOURCE ${LIBCDMS_URL}/${LIBCDMS_GZ})
-set(LIBCDMS_MD5 ce71f54616f755d67fbbb6c81ca4fd62)
-set(LIBCDMS_BRANCH master)
-set(LIBCDMS_REPOSITORY ${GIT_PROTOCOL}github.com/UV-CDAT/libcdms.git )
-
-set(GIT_CMD_STR_LIBCDMS GIT_REPOSITORY ${LIBCDMS_REPOSITORY})
-set(GIT_TAG_LIBCDMS GIT_TAG "${LIBCDMS_BRANCH}")
-
-add_cdat_package(libcdms "" "" OFF)
diff --git a/CMake/cdat_modules/libcf_deps.cmake b/CMake/cdat_modules/libcf_deps.cmake
deleted file mode 100644
index 5673f4b88a6c9369e693b12e7710b1d7aa68d4a1..0000000000000000000000000000000000000000
--- a/CMake/cdat_modules/libcf_deps.cmake
+++ /dev/null
@@ -1 +0,0 @@
-set(libcf_deps ${pkgconfig_pkg} ${python_pkg} ${netcdf_pkg} ${hdf5_pkg} ${curl_pkg} ${zlib_pkg} ${uuid_pkg} ${clapack_pkg} ${lapack_pkg} )
diff --git a/CMake/cdat_modules/libcf_external.cmake b/CMake/cdat_modules/libcf_external.cmake
deleted file mode 100644
index 9eeca6839e63b3fa048ff67b33393732c93cb49a..0000000000000000000000000000000000000000
--- a/CMake/cdat_modules/libcf_external.cmake
+++ /dev/null
@@ -1,29 +0,0 @@
-
-set(libcf_source "${CMAKE_CURRENT_BINARY_DIR}/build/libcf")
-set(libcf_install "${cdat_EXTERNALS}")
-
-configure_file(${cdat_CMAKE_SOURCE_DIR}/cdat_modules_extra/libcf_make_step.cmake.in
-  ${cdat_CMAKE_BINARY_DIR}/libcf_make_step.cmake
-  @ONLY)
-  
-configure_file(${cdat_CMAKE_SOURCE_DIR}/cdat_modules_extra/libcf_install_step.cmake.in
-  ${cdat_CMAKE_BINARY_DIR}/libcf_install_step.cmake
-  @ONLY)
-
-set(libcf_build_command ${CMAKE_COMMAND} -P ${cdat_CMAKE_BINARY_DIR}/libcf_make_step.cmake)
-set(libcf_install_command ${CMAKE_COMMAND} -P ${cdat_CMAKE_BINARY_DIR}/libcf_install_step.cmake)
-
-ExternalProject_Add(libcf
-  DOWNLOAD_DIR ${CDAT_PACKAGE_CACHE_DIR}
-  SOURCE_DIR ${libcf_source}
-  INSTALL_DIR ${libcf_install}
-  URL ${LIBCF_URL}/${LIBCF_GZ}
-  URL_MD5 ${LIBCF_MD5}
-  BUILD_IN_SOURCE 1
-  CONFIGURE_COMMAND ${CMAKE_COMMAND} -DINSTALL_DIR=<INSTALL_DIR> -DWORKING_DIR=<SOURCE_DIR> -P ${cdat_CMAKE_BINARY_DIR}/cdat_configure_step.cmake
-  PATCH_COMMAND ""
-  BUILD_COMMAND ${libcf_build_command}
-  INSTALL_COMMAND ${libcf_install_command}
-  DEPENDS ${libcf_deps}
-  ${ep_log_options}
-)
diff --git a/CMake/cdat_modules/libcf_pkg.cmake b/CMake/cdat_modules/libcf_pkg.cmake
deleted file mode 100644
index eed8c349755512adb5148c7c759a5898b7e9465e..0000000000000000000000000000000000000000
--- a/CMake/cdat_modules/libcf_pkg.cmake
+++ /dev/null
@@ -1,10 +0,0 @@
-set(LIBCF_MAJOR 1)
-set(LIBCF_MINOR 0)
-set(LIBCF_PATCH beta11)
-set(LIBCF_VERSION ${LIBCF_MAJOR}.${LIBCF_MINOR}-${LIBCF_PATCH})
-set(LIBCF_URL ${LLNL_URL})
-set(LIBCF_GZ libcf-${LIBCF_VERSION}.tar.gz)
-set(LIBCF_MD5 aba4896eab79d36c7283fc7b75fb16ee)
-set(LIBCF_SOURCE ${LIBCF_URL}/${LIBCF_GZ})
-
-add_cdat_package_dependent(libcf "" "" OFF "CDAT_BUILD_LEAN" ON)
diff --git a/CMake/cdat_modules/libdrs_deps.cmake b/CMake/cdat_modules/libdrs_deps.cmake
deleted file mode 100644
index 1958c1f35fb555ce954a96d9865236b3fec77d33..0000000000000000000000000000000000000000
--- a/CMake/cdat_modules/libdrs_deps.cmake
+++ /dev/null
@@ -1 +0,0 @@
-set(libdrs_deps ${netcdf_pkg} ${g2clib_pkg})
diff --git a/CMake/cdat_modules/libdrs_external.cmake b/CMake/cdat_modules/libdrs_external.cmake
deleted file mode 100644
index f1b28967895b5c55de9285c5fed9b804d5f828d1..0000000000000000000000000000000000000000
--- a/CMake/cdat_modules/libdrs_external.cmake
+++ /dev/null
@@ -1,49 +0,0 @@
-set(libdrs_source "${CMAKE_CURRENT_BINARY_DIR}/build/libdrs")
-set(libdrs_install "${cdat_EXTERNALS}")
-
-if (APPLE)
-    set(libdrs_make_file libdrs_Makefile.Mac.gfortran.in)
-else ()
-    set(libdrs_make_file libdrs_Makefile.Linux.gfortran.in)
-endif ()
-
-configure_file(
-    ${cdat_CMAKE_SOURCE_DIR}/cdat_modules_extra/${libdrs_make_file}
-    ${CMAKE_CURRENT_BINARY_DIR}/CMake/libdrs_Makefile
-    )
-
-if(DEFINED GIT_CMD_STR_LIBDRS )
-    message("[INFO] [libdrs] Installing ${nm} from ${GIT_CMD_STR_LIBDRS}")
-    include(GetGitRevisionDescription)
-    set(URL_STR )
-    set(URL_MD5_STR )
-else()
-    message("[INFO] [libdrs] Installed ${nm} from tarball ${LIBDRS_GZ}")
-    set(URL_STR URL ${LIBDRS_URL}/${LIBDRS_GZ})
-    set(URL_MD5_STR URL_MD5 ${LIBDRS_MD5})
-    set(GIT_CMD_STR_LIBDRS )
-    set(GIT_TAG )
-endif()
-set(LIBDRS_MAKE_ARGS -f^^${CMAKE_CURRENT_BINARY_DIR}/CMake/libdrs_Makefile)
-set(LIBDRS_MAKE_INSTALL_ARGS -f^^${CMAKE_CURRENT_BINARY_DIR}/CMake/libdrs_Makefile^^install)
-set(LIBDRS_BUILD_ARGS -fPIC)
-
-ExternalProject_Add(libdrs
-  LIST_SEPARATOR ^^
-  DOWNLOAD_DIR ${CDAT_PACKAGE_CACHE_DIR}
-  SOURCE_DIR ${libdrs_source}
-  INSTALL_DIR ${libdrs_install}
-  ${URL_STR}
-  ${URL_MD5_STR}
-  ${GIT_CMD_STR_LIBDRS}
-  ${GIT_TAG}
-  BUILD_IN_SOURCE 1
-  CONFIGURE_COMMAND ""
-  BUILD_COMMAND ${CMAKE_COMMAND} -DADDITIONAL_CFLAGS=${LIBDRS_BUILD_ARGS} -Dmake=$(MAKE) -DBUILD_ARGS=${LIBDRS_MAKE_ARGS} -DWORKING_DIR=<SOURCE_DIR>/lib -P ${cdat_CMAKE_BINARY_DIR}/cdat_make_step.cmake
-  INSTALL_COMMAND ${CMAKE_COMMAND} -DADDITIONAL_CFLAGS=${LIBDRS_BUILD_ARGS} -Dmake=$(MAKE) -DBUILD_ARGS=${LIBDRS_MAKE_INSTALL_ARGS} -DWORKING_DIR=<SOURCE_DIR>/lib -P ${cdat_CMAKE_BINARY_DIR}/cdat_make_step.cmake
-  DEPENDS ${libdrs_deps}
-  ${ep_log_options}
-)
-if (DEFINED GIT_CMD_STR_LIBDRS)
-  unset(GIT_CMD_STR_LIBDRS)
-endif()
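The ^^ tokens in the make arguments above lean on ExternalProject's LIST_SEPARATOR: every occurrence of the declared separator is replaced by ; when the command runs, so a single shell-safe argument becomes a CMake list again inside the -P script. A sketch of the effect, assuming cdat_make_step.cmake (not shown in this diff) reduces to an execute_process over BUILD_ARGS:

    # on the command line (a single argument):
    #   -DBUILD_ARGS=-f^^<binary dir>/CMake/libdrs_Makefile^^install
    # after LIST_SEPARATOR ^^ substitution the -P script receives:
    #   BUILD_ARGS == "-f;<binary dir>/CMake/libdrs_Makefile;install"
    # which expands back into three separate arguments for make:
    execute_process(COMMAND ${make} ${BUILD_ARGS} WORKING_DIRECTORY ${WORKING_DIR})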
diff --git a/CMake/cdat_modules/libdrs_pkg.cmake b/CMake/cdat_modules/libdrs_pkg.cmake
deleted file mode 100644
index 6258a08d7c1544d49f65f8df5284727f74c82d8f..0000000000000000000000000000000000000000
--- a/CMake/cdat_modules/libdrs_pkg.cmake
+++ /dev/null
@@ -1,11 +0,0 @@
-set(LIBDRS_VERSION 1.0.0)
-set(LIBDRS_URL ${LLNL_URL})
-set(LIBDRS_BRANCH master)
-set(LIBDRS_REPOSITORY ${GIT_PROTOCOL}github.com/UV-CDAT/libdrs.git )
-
-set(GIT_CMD_STR_LIBDRS GIT_REPOSITORY ${LIBDRS_REPOSITORY})
-set(GIT_TAG GIT_TAG "${LIBDRS_BRANCH}")
-if (CDAT_BUILD_PCMDI)
-  set(CDAT_BUILD_LIBDRS ON)
-  add_cdat_package(libdrs "" "" ON)
-endif()
diff --git a/CMake/cdat_modules/libdrsfortran_deps.cmake b/CMake/cdat_modules/libdrsfortran_deps.cmake
deleted file mode 100644
index c5db76f4b4b19853bc3c8100c8a7ac3bf7b16ffb..0000000000000000000000000000000000000000
--- a/CMake/cdat_modules/libdrsfortran_deps.cmake
+++ /dev/null
@@ -1 +0,0 @@
-set(libdrsfortran_deps ${netcdf_pkg} ${g2clib_pkg} ${libcdms_pkg} ${libdrs_pkg})
diff --git a/CMake/cdat_modules/libdrsfortran_external.cmake b/CMake/cdat_modules/libdrsfortran_external.cmake
deleted file mode 100644
index ba6d738a4c7cd7d93a0609a9d97834ffb5653f33..0000000000000000000000000000000000000000
--- a/CMake/cdat_modules/libdrsfortran_external.cmake
+++ /dev/null
@@ -1,46 +0,0 @@
-set(libdrs_source "${CMAKE_CURRENT_BINARY_DIR}/build/libdrs")
-set(libdrs_install "${cdat_EXTERNALS}")
-
-set(libdrsfortran_make_file libdrs_Makefile.Mac.fwrap.gfortran.in)
-
-configure_file(
-    ${cdat_CMAKE_SOURCE_DIR}/cdat_modules_extra/${libdrsfortran_make_file}
-    ${CMAKE_CURRENT_BINARY_DIR}/CMake/libdrsfortran_Makefile
-    )
-
-if(DEFINED GIT_CMD_STR_LIBDRSFORTRAN )
-    message("[INFO] [libdrs] Installing ${nm} from ${GIT_CMD_STR_LIBDRSFORTRAN}")
-    include(GetGitRevisionDescription)
-    set(URL_STR )
-    set(URL_MD5_STR )
-else()
-    message("[INFO] [libdrs] Installed ${nm} from tarball ${LIBDRSFORTRAN_GZ}")
-    set(URL_STR URL ${LIBDRSFORTRAN_URL}/${LIBDRSFORTRAN_GZ})
-    set(URL_MD5_STR URL_MD5 ${LIBDRSFORTRAN_MD5})
-    set(GIT_CMD_STR_LIBDRSFORTRAN )
-    set(GIT_TAG )
-endif()
-
-set(LIBDRS_MAKE_ARGS -f^^${CMAKE_CURRENT_BINARY_DIR}/CMake/libdrsfortran_Makefile)
-set(LIBDRS_MAKE_INSTALL_ARGS -f^^${CMAKE_CURRENT_BINARY_DIR}/CMake/libdrsfortran_Makefile^^install)
-set(LIBDRS_BUILD_ARGS -fPIC)
-
-ExternalProject_Add(libdrsfortran
-  LIST_SEPARATOR ^^
-  DOWNLOAD_DIR ${CDAT_PACKAGE_CACHE_DIR}
-  SOURCE_DIR ${libdrs_source}
-  INSTALL_DIR ${libdrs_install}
-  ${URL_STR}
-  ${URL_MD5_STR}
-  ${GIT_CMD_STR_LIBDRSFORTRAN}
-  ${GIT_TAG}
-  BUILD_IN_SOURCE 1
-  CONFIGURE_COMMAND ""
-  BUILD_COMMAND ${CMAKE_COMMAND} -DADDITIONAL_CFLAGS=${LIBDRS_BUILD_ARGS} -Dmake=$(MAKE) -DBUILD_ARGS=${LIBDRS_MAKE_ARGS} -DWORKING_DIR=<SOURCE_DIR>/lib -P ${cdat_CMAKE_BINARY_DIR}/cdat_make_step.cmake
-  INSTALL_COMMAND ${CMAKE_COMMAND} -DADDITIONAL_CFLAGS=${LIBDRS_BUILD_ARGS} -Dmake=$(MAKE) -DBUILD_ARGS=${LIBDRS_MAKE_INSTALL_ARGS} -DWORKING_DIR=<SOURCE_DIR>/lib -P ${cdat_CMAKE_BINARY_DIR}/cdat_make_step.cmake
-  DEPENDS ${libdrsfortran_deps}
-  ${ep_log_options}
-)
-if (DEFINED GIT_CMD_STR_LIBDRSFORTRAN)
-  unset(GIT_CMD_STR_LIBDRSFORTRAN)
-endif()
diff --git a/CMake/cdat_modules/libdrsfortran_pkg.cmake b/CMake/cdat_modules/libdrsfortran_pkg.cmake
deleted file mode 100644
index 23e8e34a4a2dc365af86cca9ab76499c067af407..0000000000000000000000000000000000000000
--- a/CMake/cdat_modules/libdrsfortran_pkg.cmake
+++ /dev/null
@@ -1,13 +0,0 @@
-set(LIBDRSFORTRAN_VERSION 1.0.0)
-set(LIBDRSFORTRAN_URL ${LLNL_URL})
-set(LIBDRSFORTRAN_BRANCH master)
-set(LIBDRSFORTRAN_REPOSITORY ${GIT_PROTOCOL}github.com/UV-CDAT/libdrs.git )
-
-set(GIT_CMD_STR_LIBDRSFORTRAN GIT_REPOSITORY ${LIBDRSFORTRAN_REPOSITORY})
-set(GIT_TAG GIT_TAG "${LIBDRSFORTRAN_BRANCH}")
-if (CDAT_BUILD_PCMDI)
-    if (APPLE)
-        set(CDAT_BUILD_LIBDRSFORTRAN ON)
-        add_cdat_package(libdrsfortran "" "" ON)
-    endif()
-endif()
diff --git a/CMake/cdat_modules/libxml2_deps.cmake b/CMake/cdat_modules/libxml2_deps.cmake
deleted file mode 100644
index cd79834e8fedfe3ea1cb3f7257fdae3411dffe15..0000000000000000000000000000000000000000
--- a/CMake/cdat_modules/libxml2_deps.cmake
+++ /dev/null
@@ -1 +0,0 @@
-set(libXML2_deps ${pkgconfig_pkg} ${readline_pkg})
diff --git a/CMake/cdat_modules/libxml2_external.cmake b/CMake/cdat_modules/libxml2_external.cmake
deleted file mode 100644
index 59216b6b5a2d5f70843ea595b9e0e9e74dd562e3..0000000000000000000000000000000000000000
--- a/CMake/cdat_modules/libxml2_external.cmake
+++ /dev/null
@@ -1,17 +0,0 @@
-
-set(libXML2_source "${CMAKE_CURRENT_BINARY_DIR}/build/libXML2")
-set(libXML2_install "${cdat_EXTERNALS}")
-
-ExternalProject_Add(libXML2
-  DOWNLOAD_DIR ${CDAT_PACKAGE_CACHE_DIR}
-  SOURCE_DIR ${libXML2_source}
-  INSTALL_DIR ${libXML2_install}
-  URL ${XML_URL}/${XML_GZ}
-  URL_MD5 ${XML_MD5}
-  BUILD_IN_SOURCE 1
-  PATCH_COMMAND ""
-  CONFIGURE_COMMAND ${CMAKE_COMMAND} -DINSTALL_DIR=<INSTALL_DIR> -DWORKING_DIR=<SOURCE_DIR> -P ${cdat_CMAKE_BINARY_DIR}/cdat_configure_step.cmake
-  DEPENDS ${libXML2_deps}
-  ${ep_log_options}
-)
-
diff --git a/CMake/cdat_modules/libxml2_pkg.cmake b/CMake/cdat_modules/libxml2_pkg.cmake
deleted file mode 100644
index fd2f57ad281af0f106b5a690d246729ebab47cf2..0000000000000000000000000000000000000000
--- a/CMake/cdat_modules/libxml2_pkg.cmake
+++ /dev/null
@@ -1,19 +0,0 @@
-set(XML_MAJOR 2)
-set(XML_MINOR 7)
-set(XML_PATCH 8)
-set(XML_MAJOR_SRC 2)
-set(XML_MINOR_SRC 7)
-set(XML_PATCH_SRC 8)
-set(XML_URL ${LLNL_URL})
-set(XML_GZ libxml2-${XML_MAJOR_SRC}.${XML_MINOR_SRC}.${XML_PATCH_SRC}.tar.gz)
-set(XML_MD5 8127a65e8c3b08856093099b52599c86)
-
-set (nm XML)
-string(TOUPPER ${nm} uc_nm)
-set(${uc_nm}_VERSION ${${nm}_MAJOR_SRC}.${${nm}_MINOR_SRC}.${${nm}_PATCH_SRC})
-set(LIBXML2_VERSION ${XML_VERSION})
-set(LIBXML2_SOURCE ${XML_URL}/${XML_GZ} )
-set(LIBXML2_MD5 ${XML_MD5})
-
-add_cdat_package(libXML2 "" "Build libxml2" OFF)
-
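The nm/uc_nm indirection above recurs in nearly every pkg file here, so it is worth spelling out once in isolation (values illustrative):

    set(nm XML)
    string(TOUPPER ${nm} uc_nm)               # uc_nm == "XML"
    set(XML_MAJOR_SRC 2)
    # ${${nm}_MAJOR_SRC} expands inside-out: ${nm} -> XML, then the
    # outer reference reads ${XML_MAJOR_SRC} -> 2
    set(${uc_nm}_VERSION ${${nm}_MAJOR_SRC})  # sets XML_VERSION to "2"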
diff --git a/CMake/cdat_modules/libxslt_deps.cmake b/CMake/cdat_modules/libxslt_deps.cmake
deleted file mode 100644
index 31ab3ff7fa86b7d9631ab721a7eccf33ee118126..0000000000000000000000000000000000000000
--- a/CMake/cdat_modules/libxslt_deps.cmake
+++ /dev/null
@@ -1 +0,0 @@
-set(libXSLT_deps ${pkgconfig_pkg} ${readline_pkg} ${libxml2_pkg})
diff --git a/CMake/cdat_modules/libxslt_external.cmake b/CMake/cdat_modules/libxslt_external.cmake
deleted file mode 100644
index 2064cf209d35eb56b64601e6e851ffd78c114ef5..0000000000000000000000000000000000000000
--- a/CMake/cdat_modules/libxslt_external.cmake
+++ /dev/null
@@ -1,20 +0,0 @@
-
-set(libXSLT_source "${CMAKE_CURRENT_BINARY_DIR}/build/libXSLT")
-set(libXSLT_install "${cdat_EXTERNALS}")
-
-if(NOT LIBXML2_FOUND)
-  set(libXSLT_configure_args --with-libxml-prefix=${libXSLT_install})
-endif()
-
-ExternalProject_Add(libXSLT
-  DOWNLOAD_DIR ${CDAT_PACKAGE_CACHE_DIR}
-  SOURCE_DIR ${libXSLT_source}
-  INSTALL_DIR ${libXSLT_install}
-  URL ${XSLT_URL}/${XSLT_GZ}
-  URL_MD5 ${XSLT_MD5}
-  BUILD_IN_SOURCE 1
-  PATCH_COMMAND ""
-  CONFIGURE_COMMAND ${CMAKE_COMMAND} -DCONFIGURE_ARGS=${libXSLT_configure_args} -DINSTALL_DIR=<INSTALL_DIR> -DWORKING_DIR=<SOURCE_DIR> -P ${cdat_CMAKE_BINARY_DIR}/cdat_configure_step.cmake
-  DEPENDS ${libXSLT_deps}
-  ${ep_log_options}
-)
diff --git a/CMake/cdat_modules/libxslt_pkg.cmake b/CMake/cdat_modules/libxslt_pkg.cmake
deleted file mode 100644
index d763d76ba137f2e5cbb3c5aa032994120ce23bc4..0000000000000000000000000000000000000000
--- a/CMake/cdat_modules/libxslt_pkg.cmake
+++ /dev/null
@@ -1,19 +0,0 @@
-set(XSLT_MAJOR 1)
-set(XSLT_MINOR 1)
-set(XSLT_PATCH 22)
-set(XSLT_MAJOR_SRC 1)
-set(XSLT_MINOR_SRC 1)
-set(XSLT_PATCH_SRC 26)
-set(XSLT_URL ${LLNL_URL})
-set(XSLT_GZ libxslt-${XSLT_MAJOR_SRC}.${XSLT_MINOR_SRC}.${XSLT_PATCH_SRC}.tar.gz)
-set(XSLT_MD5 e61d0364a30146aaa3001296f853b2b9)
-
-set (nm XSLT)
-string(TOUPPER ${nm} uc_nm)
-set(${uc_nm}_VERSION ${${nm}_MAJOR_SRC}.${${nm}_MINOR_SRC}.${${nm}_PATCH_SRC})
-set(LIBXSLT_VERSION ${XSLT_VERSION})
-set(LIBXSLT_SOURCE ${XSLT_URL}/${XSLT_GZ})
-set(LIBXSLT_MD5 ${XSLT_MD5})
-
-add_cdat_package(libXSLT "" "Build libxslt" OFF)
-
diff --git a/CMake/cdat_modules/lxml_deps.cmake b/CMake/cdat_modules/lxml_deps.cmake
deleted file mode 100644
index 52670d8f93e91561a97983d70439c089f87a211b..0000000000000000000000000000000000000000
--- a/CMake/cdat_modules/lxml_deps.cmake
+++ /dev/null
@@ -1 +0,0 @@
-set(lxml_deps ${cython_pkg} ${pip_pkg})
diff --git a/CMake/cdat_modules/lxml_external.cmake b/CMake/cdat_modules/lxml_external.cmake
deleted file mode 100644
index 3b8a91e1516abb860ddbcb1664d63555650f8de6..0000000000000000000000000000000000000000
--- a/CMake/cdat_modules/lxml_external.cmake
+++ /dev/null
@@ -1,26 +0,0 @@
-# create an external project to install lxml,
-# and configure and build it
-set(LXML_SOURCE_DIR ${CMAKE_CURRENT_BINARY_DIR}/build/lxml)
-set(LXML_BINARY_DIR ${LXML_SOURCE_DIR})
-
-configure_file(${cdat_CMAKE_SOURCE_DIR}/cdat_modules_extra/lxml_build_step.cmake.in
-  ${cdat_CMAKE_BINARY_DIR}/lxml_build_step.cmake @ONLY)
-configure_file(${cdat_CMAKE_SOURCE_DIR}/cdat_modules_extra/lxml_install_step.cmake.in
-  ${cdat_CMAKE_BINARY_DIR}/lxml_install_step.cmake @ONLY)
-
-set(LXML_BUILD_COMMAND ${CMAKE_COMMAND} -P ${cdat_CMAKE_BINARY_DIR}/lxml_build_step.cmake)
-set(LXML_INSTALL_COMMAND ${CMAKE_COMMAND} -P ${cdat_CMAKE_BINARY_DIR}/lxml_install_step.cmake)
-
-ExternalProject_Add(lxml
-  DOWNLOAD_DIR ${CDAT_PACKAGE_CACHE_DIR}
-  SOURCE_DIR ${LXML_SOURCE_DIR}
-  URL ${LXML_URL}/${LXML_GZ}
-  URL_MD5 ${LXML_MD5}
-  BUILD_IN_SOURCE 1
-  CONFIGURE_COMMAND ""
-  BUILD_COMMAND ${LXML_BUILD_COMMAND}
-  INSTALL_COMMAND ${LXML_INSTALL_COMMAND}
- # INSTALL_COMMAND ${PYTHON_EXECUTABLE} setup.py install ${PYTHON_EXTRA_PREFIX}
-  DEPENDS ${lxml_deps}
-  ${ep_log_options}
- )
diff --git a/CMake/cdat_modules/lxml_pkg.cmake b/CMake/cdat_modules/lxml_pkg.cmake
deleted file mode 100644
index df4fb236d7ba746730d995e1b4fa6f8802b7055a..0000000000000000000000000000000000000000
--- a/CMake/cdat_modules/lxml_pkg.cmake
+++ /dev/null
@@ -1,13 +0,0 @@
-set(LXML_MAJOR_SRC 2)
-set(LXML_MINOR_SRC 3)
-set(LXML_PATCH_SRC 5)
-set(LXML_URL ${LLNL_URL})
-set(LXML_GZ lxml-${LXML_MAJOR_SRC}.${LXML_MINOR_SRC}.${LXML_PATCH_SRC}.tar.gz)
-set(LXML_MD5 730bb63383528b65eaa099d64ce276cf)
-
-set (nm LXML)
-string(TOUPPER ${nm} uc_nm)
-set(${uc_nm}_VERSION ${${nm}_MAJOR_SRC}.${${nm}_MINOR_SRC}.${${nm}_PATCH_SRC})
-set(LXML_SOURCE ${LXML_URL}/${LXML_GZ})
-
-add_cdat_package_dependent(lxml "" "" OFF "CDAT_BUILD_LEAN" ON)
diff --git a/CMake/cdat_modules/markupsafe_deps.cmake b/CMake/cdat_modules/markupsafe_deps.cmake
deleted file mode 100644
index 2b76bd653b7bbd8219e0358037144245f98fa242..0000000000000000000000000000000000000000
--- a/CMake/cdat_modules/markupsafe_deps.cmake
+++ /dev/null
@@ -1 +0,0 @@
-set(markupsafe_deps ${pip_pkg} ${pygments_pkg})
diff --git a/CMake/cdat_modules/markupsafe_external.cmake b/CMake/cdat_modules/markupsafe_external.cmake
deleted file mode 100644
index 9ea130d01b063a2ed5df51f5af57ce31362630c5..0000000000000000000000000000000000000000
--- a/CMake/cdat_modules/markupsafe_external.cmake
+++ /dev/null
@@ -1,6 +0,0 @@
-
-# create an external project to install markupsafe,
-# and configure and build it
-set(nm markupsafe)
-
-include(pipinstaller)
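Several pure-Python packages in this tree reduce to exactly this shape: name the package in nm, then include pipinstaller. pipinstaller.cmake itself is not part of this diff; a plausible reduction of what such an include does, with PIP_BINARY standing in for however the real module locates pip (the body and that variable are assumptions, not the original module):

    set(nm markupsafe)
    string(TOUPPER ${nm} uc_nm)
    ExternalProject_Add(${nm}
      DOWNLOAD_DIR ${CDAT_PACKAGE_CACHE_DIR}
      URL ${${uc_nm}_SOURCE}
      URL_MD5 ${${uc_nm}_MD5}
      BUILD_IN_SOURCE 1
      CONFIGURE_COMMAND ""
      BUILD_COMMAND ""
      # assumed: install straight out of the package cache
      INSTALL_COMMAND ${PIP_BINARY} install ${CDAT_PACKAGE_CACHE_DIR}/${${uc_nm}_GZ}
      DEPENDS ${${nm}_deps}
      ${ep_log_options}
    )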
diff --git a/CMake/cdat_modules/markupsafe_pkg.cmake b/CMake/cdat_modules/markupsafe_pkg.cmake
deleted file mode 100644
index b4e664655b37ce077810724c2a8465e00f5cf259..0000000000000000000000000000000000000000
--- a/CMake/cdat_modules/markupsafe_pkg.cmake
+++ /dev/null
@@ -1,8 +0,0 @@
-set(markupsafe_MAJOR_SRC 0)
-set(markupsafe_MINOR_SRC 18)
-set(markupsafe_PATCH_SRC )
-set(MARKUPSAFE_VERSION ${markupsafe_MAJOR_SRC}.${markupsafe_MINOR_SRC})
-set(MARKUPSAFE_GZ MarkupSafe-${MARKUPSAFE_VERSION}.tar.gz)
-set(MARKUPSAFE_SOURCE ${LLNL_URL}/${MARKUPSAFE_GZ})
-set(MARKUPSAFE_MD5 f8d252fd05371e51dec2fe9a36890687)
-add_cdat_package_dependent(markupsafe "" "" OFF "CDAT_BUILD_GUI" OFF)
diff --git a/CMake/cdat_modules/matplotlib_deps.cmake b/CMake/cdat_modules/matplotlib_deps.cmake
deleted file mode 100644
index 794a6a4766233df2f648f8cea4af03fe04288c10..0000000000000000000000000000000000000000
--- a/CMake/cdat_modules/matplotlib_deps.cmake
+++ /dev/null
@@ -1 +0,0 @@
-set(Matplotlib_deps ${pyqt_pkg} ${freetype_pkg} ${cairo_pkg} ${numpy_pkg} ${png_pkg} ${six_pkg} ${dateutils_pkg} ${pyparsing_pkg} ${cycler_pkg})
diff --git a/CMake/cdat_modules/matplotlib_external.cmake b/CMake/cdat_modules/matplotlib_external.cmake
deleted file mode 100644
index 8cbbd53f660bda7d6e9a9d852b418519dedb88e9..0000000000000000000000000000000000000000
--- a/CMake/cdat_modules/matplotlib_external.cmake
+++ /dev/null
@@ -1,38 +0,0 @@
-# Matplotlib
-#
-set(matplotlib_source_dir "${CMAKE_CURRENT_BINARY_DIR}/build/Matplotlib")
-
-if(CDAT_BUILD_GUI)
-  set(MATPLOTLIB_BACKEND "Qt4Agg")
-else()
-  set(MATPLOTLIB_BACKEND "Agg")
-endif()
-
-configure_file(
-  "${cdat_CMAKE_SOURCE_DIR}/cdat_modules_extra/matplotlib_patch_step.cmake.in"
-  "${cdat_CMAKE_BINARY_DIR}/matplotlib_patch_step.cmake"
-  @ONLY
-)
-
-configure_file(
-  "${cdat_CMAKE_SOURCE_DIR}/cdat_modules_extra/matplotlib_build_step.cmake.in"
-  "${cdat_CMAKE_BINARY_DIR}/matplotlib_build_step.cmake"
-  @ONLY
-)
-
-set(matplotlib_patch_command ${CMAKE_COMMAND} -P ${cdat_CMAKE_BINARY_DIR}/matplotlib_patch_step.cmake)
-set(matplotlib_build_command ${CMAKE_COMMAND} -P ${cdat_CMAKE_BINARY_DIR}/matplotlib_build_step.cmake)
-
-ExternalProject_Add(Matplotlib
-  DOWNLOAD_DIR ${CDAT_PACKAGE_CACHE_DIR}
-  SOURCE_DIR ${matplotlib_source_dir}
-  URL ${MATPLOTLIB_URL}/${MATPLOTLIB_GZ}
-  URL_MD5 ${MATPLOTLIB_MD5}
-  BUILD_IN_SOURCE 1
-  CONFIGURE_COMMAND ""
-  PATCH_COMMAND ${matplotlib_patch_command}
-  BUILD_COMMAND ${matplotlib_build_command}
-  INSTALL_COMMAND ""
-  DEPENDS ${Matplotlib_deps}
-  ${ep_log_options}
-)
diff --git a/CMake/cdat_modules/matplotlib_pkg.cmake b/CMake/cdat_modules/matplotlib_pkg.cmake
deleted file mode 100644
index 365a67c93273e326639dc1be3d4e90eaa590aed5..0000000000000000000000000000000000000000
--- a/CMake/cdat_modules/matplotlib_pkg.cmake
+++ /dev/null
@@ -1,17 +0,0 @@
-set(MATPLOTLIB_MAJOR_MIN 1)
-set(MATPLOTLIB_MINOR_MIN 1)
-set(MATPLOTLIB_PATCH_MIN 0)
-set(MATPLOTLIB_MAJOR 1)
-set(MATPLOTLIB_MINOR 5)
-set(MATPLOTLIB_PATCH 1)
-set(MATPLOTLIB_VERSION ${MATPLOTLIB_MAJOR}.${MATPLOTLIB_MINOR}.${MATPLOTLIB_PATCH})
-set(MATPLOTLIB_URL ${LLNL_URL})
-set(MATPLOTLIB_GZ matplotlib-${MATPLOTLIB_VERSION}.tar.gz)
-set(MATPLOTLIB_MD5 b22dc4962f36aab919a7125b3b35953b)
-
-set(nm MATPLOTLIB)
-string(TOUPPER ${nm} uc_nm)
-set(${uc_nm}_VERSION ${${nm}_MAJOR}.${${nm}_MINOR}.${${nm}_PATCH})
-set(MATPLOTLIB_SOURCE ${MATPLOTLIB_URL}/${MATPLOTLIB_GZ})
-
-add_cdat_package_dependent(Matplotlib "" "" ON "CDAT_BUILD_GRAPHICS" OFF)
diff --git a/CMake/cdat_modules/mccabe_deps.cmake b/CMake/cdat_modules/mccabe_deps.cmake
deleted file mode 100644
index 1d322a3534a08a6ef61631f1490126877a3d422c..0000000000000000000000000000000000000000
--- a/CMake/cdat_modules/mccabe_deps.cmake
+++ /dev/null
@@ -1 +0,0 @@
-set(mccabe_deps ${python_pkg} ${setuptools_pkg})
diff --git a/CMake/cdat_modules/mccabe_external.cmake b/CMake/cdat_modules/mccabe_external.cmake
deleted file mode 100644
index 79e6561e5942359c4176703ec76e78a7bc87fc81..0000000000000000000000000000000000000000
--- a/CMake/cdat_modules/mccabe_external.cmake
+++ /dev/null
@@ -1,12 +0,0 @@
-ExternalProject_Add(mccabe
-  DOWNLOAD_DIR "${CMAKE_CURRENT_BINARY_DIR}"
-  SOURCE_DIR "${CMAKE_CURRENT_BINARY_DIR}/build/mccabe"
-  URL "${MCCABE_SOURCE}"
-  URL_MD5 ${MCCABE_MD5}
-  BUILD_IN_SOURCE 1
-  CONFIGURE_COMMAND ""
-  BUILD_COMMAND "${PYTHON_EXECUTABLE}" setup.py build
-  INSTALL_COMMAND "${PYTHON_EXECUTABLE}" setup.py install "${PYTHON_EXTRA_PREFIX}"
-  DEPENDS ${mccabe_deps}
-  ${ep_log_options}
-  )
diff --git a/CMake/cdat_modules/mccabe_pkg.cmake b/CMake/cdat_modules/mccabe_pkg.cmake
deleted file mode 100644
index e2e3795a4eebecd0b6395b2e6029ce8ae8f731e2..0000000000000000000000000000000000000000
--- a/CMake/cdat_modules/mccabe_pkg.cmake
+++ /dev/null
@@ -1,17 +0,0 @@
-set(nm mccabe)
-string(TOUPPER ${nm} uc_nm)
-
-set(${uc_nm}_MAJOR 0)
-set(${uc_nm}_MINOR 3)
-set(${uc_nm}_PATCH 1)
-set(${uc_nm}_VERSION ${${uc_nm}_MAJOR}.${${uc_nm}_MINOR}.${${uc_nm}_PATCH})
-set(${uc_nm}_URL ${LLNL_URL})
-set(${uc_nm}_GZ ${nm}-${${uc_nm}_VERSION}.tar.gz)
-set(${uc_nm}_MD5 9a1570c470ff5db678cc0c03d5c0c237 )
-
-set(${uc_nm}_VERSION ${${uc_nm}_MAJOR}.${${uc_nm}_MINOR}.${${uc_nm}_PATCH})
-set(${uc_nm}_SOURCE ${${uc_nm}_URL}/${${uc_nm}_GZ})
-
-if(BUILD_TESTING)
-  add_cdat_package(${nm} "" "" ON)
-endif()
diff --git a/CMake/cdat_modules/md5_deps.cmake b/CMake/cdat_modules/md5_deps.cmake
deleted file mode 100644
index 3ba1ef5977def5dd12771b21811f522902fbf459..0000000000000000000000000000000000000000
--- a/CMake/cdat_modules/md5_deps.cmake
+++ /dev/null
@@ -1 +0,0 @@
-set(MD5_deps)
diff --git a/CMake/cdat_modules/md5_pkg.cmake b/CMake/cdat_modules/md5_pkg.cmake
deleted file mode 100644
index a4ac90144c9771ca3af368689c912584b05869f7..0000000000000000000000000000000000000000
--- a/CMake/cdat_modules/md5_pkg.cmake
+++ /dev/null
@@ -1,3 +0,0 @@
-find_package(MD5)
-set(MD5PRG ${MD5_EXECUTABLE})
-set(MD5CNT 1)
diff --git a/CMake/cdat_modules/mpi4py_deps.cmake b/CMake/cdat_modules/mpi4py_deps.cmake
deleted file mode 100644
index cbba65f4c196aa7db21734252172584d007316d5..0000000000000000000000000000000000000000
--- a/CMake/cdat_modules/mpi4py_deps.cmake
+++ /dev/null
@@ -1 +0,0 @@
-set(Mpi4py_deps ${numpy_pkg} ${mpi_pkg})
diff --git a/CMake/cdat_modules/mpi4py_external.cmake b/CMake/cdat_modules/mpi4py_external.cmake
deleted file mode 100644
index 4c1484d29280412f771ea7707439629b2b489641..0000000000000000000000000000000000000000
--- a/CMake/cdat_modules/mpi4py_external.cmake
+++ /dev/null
@@ -1,50 +0,0 @@
-# The Mpi4py project 
-
-set(mpi4py_binary "${CMAKE_CURRENT_BINARY_DIR}/build/Mpi4py")
-
-# the build and install steps are driven by cmake -P scripts, configured below
-configure_file(
-  ${cdat_CMAKE_SOURCE_DIR}/cdat_modules_extra/mpi4py_make_step.cmake.in
-  ${cdat_CMAKE_BINARY_DIR}/mpi4py_make_step.cmake @ONLY)
-
-configure_file(
-  ${cdat_CMAKE_SOURCE_DIR}/cdat_modules_extra/mpi4py_install_step.cmake.in
-  ${cdat_CMAKE_BINARY_DIR}/mpi4py_install_step.cmake @ONLY)
-
-set(mpi4py_BUILD_COMMAND ${CMAKE_COMMAND} -P ${cdat_CMAKE_BINARY_DIR}/mpi4py_make_step.cmake)
-set(mpi4py_INSTALL_COMMAND ${CMAKE_COMMAND} -P ${cdat_CMAKE_BINARY_DIR}/mpi4py_install_step.cmake)
-
-set(Mpi4py_source "${CMAKE_CURRENT_BINARY_DIR}/build/Mpi4py")
-
-# create an external project to download mpi4py,
-# and configure and build it
-ExternalProject_Add(Mpi4py
-  DOWNLOAD_DIR ${CDAT_PACKAGE_CACHE_DIR}
-  SOURCE_DIR ${Mpi4py_source}
-  URL ${MPI4PY_URL}/${MPI4PY_GZ}
-  URL_MD5 ${MPI4PY_MD5}
-  BINARY_DIR ${CMAKE_CURRENT_BINARY_DIR}/mpi4py
-  CONFIGURE_COMMAND ""
-  BUILD_COMMAND ${mpi4py_BUILD_COMMAND}
-  UPDATE_COMMAND ""
-  INSTALL_COMMAND ${mpi4py_INSTALL_COMMAND}
-  DEPENDS 
-    ${Mpi4py_deps}
-  ${ep_log_options}
-  )
-
-# Mpi4py
-#
-
-#ExternalProject_Add(Mpi4py
-#  DOWNLOAD_DIR ${CMAKE_CURRENT_BINARY_DIR}
-#  SOURCE_DIR ${Mpi4py_source}
-#  URL ${MPI4PY_URL}/${MPI4PY_GZ}
-#  URL_MD5 ${MPI4PY_MD5}
-#  BUILD_IN_SOURCE 1
-#  CONFIGURE_COMMAND ""
-#  BUILD_COMMAND ${PYTHON_EXECUTABLE} setup.py build
-#  INSTALL_COMMAND ${PYTHON_EXECUTABLE} setup.py install ${PYTHON_EXTRA_PREFIX}
-#  DEPENDS ${Mpi4py_deps}
-#  ${ep_log_options}
-#  )
diff --git a/CMake/cdat_modules/mpi4py_pkg.cmake b/CMake/cdat_modules/mpi4py_pkg.cmake
deleted file mode 100644
index e87d6be26980d5861c3a0fbec8b77d3a8fb2d64b..0000000000000000000000000000000000000000
--- a/CMake/cdat_modules/mpi4py_pkg.cmake
+++ /dev/null
@@ -1,13 +0,0 @@
-set(MPI4PY_MAJOR 1)
-set(MPI4PY_MINOR 3)
-set(MPI4PY_VERSION ${MPI4PY_MAJOR}.${MPI4PY_MINOR})
-set(MPI4PY_URL http://uv-cdat.llnl.gov/cdat/resources)
-set(MPI4PY_GZ mpi4py-${MPI4PY_VERSION}.tar.gz)
-set(MPI4PY_MD5 978472a1a71f3142c866c9463dec7103)
-set(MPI4PY_SOURCE ${MPI4PY_URL}/${MPI4PY_GZ})
-
-add_cdat_package(Mpi4py "" "Build Mpi4py" OFF)
-if (CDAT_BUILD_PARALLEL)
-    set_property(CACHE CDAT_BUILD_MPI4PY PROPERTY VALUE ON)
-endif()
-
diff --git a/CMake/cdat_modules/mpi_deps.cmake b/CMake/cdat_modules/mpi_deps.cmake
deleted file mode 100644
index e134e5d1fe71b1c230c1306acba6e77c827220cc..0000000000000000000000000000000000000000
--- a/CMake/cdat_modules/mpi_deps.cmake
+++ /dev/null
@@ -1 +0,0 @@
-set(MPI_deps ${pkgconfig_pkg})
diff --git a/CMake/cdat_modules/mpi_external.cmake b/CMake/cdat_modules/mpi_external.cmake
deleted file mode 100644
index 8fbe6a66fcbf4d746c41cff70c4b0c98ab02bcb3..0000000000000000000000000000000000000000
--- a/CMake/cdat_modules/mpi_external.cmake
+++ /dev/null
@@ -1,16 +0,0 @@
-
-set(MPI_source "${CMAKE_CURRENT_BINARY_DIR}/build/MPI")
-set(MPI_install "${cdat_EXTERNALS}")
-
-ExternalProject_Add(MPI
-  DOWNLOAD_DIR ${CDAT_PACKAGE_CACHE_DIR}
-  SOURCE_DIR ${MPI_source}
-  INSTALL_DIR ${MPI_install}
-  URL ${MPI_URL}/${MPI_GZ}
-  URL_MD5 ${MPI_MD5}
-  BUILD_IN_SOURCE 1
-  PATCH_COMMAND ""
-  CONFIGURE_COMMAND <SOURCE_DIR>/configure --prefix=<INSTALL_DIR> --disable-vt
-  DEPENDS ${MPI_deps}
-  ${ep_log_options}
-)
diff --git a/CMake/cdat_modules/mpi_pkg.cmake b/CMake/cdat_modules/mpi_pkg.cmake
deleted file mode 100644
index c3397cd0c127fac4bf50f417cfa2b303bf75be00..0000000000000000000000000000000000000000
--- a/CMake/cdat_modules/mpi_pkg.cmake
+++ /dev/null
@@ -1,14 +0,0 @@
-set(MPI_MAJOR 1)
-set(MPI_MINOR 6)
-set(MPI_PATCH 4)
-set(MPI_URL ${LLNL_URL})
-set(MPI_GZ openmpi-${MPI_MAJOR}.${MPI_MINOR}.${MPI_PATCH}.tar.gz)
-set(MPI_MD5 70aa9b6271d904c6b337ca326e6613d1)
-set(MPI_SOURCE ${MPI_URL}/${MPI_GZ})
-set(MPI_VERSION ${MPI_MAJOR}.${MPI_MINOR}.${MPI_PATCH})
-
-add_cdat_package(MPI "" "Build MPI" OFF)
-
-if (CDAT_BUILD_PARALLEL)
-    set_property(CACHE CDAT_BUILD_MPI PROPERTY VALUE ON)
-endif()
diff --git a/CMake/cdat_modules/myproxyclient_deps.cmake b/CMake/cdat_modules/myproxyclient_deps.cmake
deleted file mode 100644
index a94e7aba74484abd9b9c59963bc6c9a82abf13aa..0000000000000000000000000000000000000000
--- a/CMake/cdat_modules/myproxyclient_deps.cmake
+++ /dev/null
@@ -1 +0,0 @@
-set(MyProxyClient_deps  ${cryptography_pkg} ${pyopenssl_pkg} ${pip_pkg} ${pyasn1_pkg} ${setuptools_pkg})
diff --git a/CMake/cdat_modules/myproxyclient_external.cmake b/CMake/cdat_modules/myproxyclient_external.cmake
deleted file mode 100644
index eae57a9c73d66824a3b5c560b5c9cbede95fea81..0000000000000000000000000000000000000000
--- a/CMake/cdat_modules/myproxyclient_external.cmake
+++ /dev/null
@@ -1,6 +0,0 @@
-
-# create an external project to install MyProxyClient,
-# and configure and build it
-set(nm MyProxyClient)
-
-include(pipinstaller)
diff --git a/CMake/cdat_modules/myproxyclient_pkg.cmake b/CMake/cdat_modules/myproxyclient_pkg.cmake
deleted file mode 100644
index 036b1bd0a675bf8e457fc5cd70443cf7b3e6826f..0000000000000000000000000000000000000000
--- a/CMake/cdat_modules/myproxyclient_pkg.cmake
+++ /dev/null
@@ -1,12 +0,0 @@
-set(MYPROXYCLIENT_MAJOR_SRC 1)
-set(MYPROXYCLIENT_MINOR_SRC 3)
-set(MYPROXYCLIENT_PATCH_SRC 0)
-
-set (nm MYPROXYCLIENT)
-string(TOUPPER ${nm} uc_nm)
-set(${uc_nm}_VERSION ${${nm}_MAJOR_SRC}.${${nm}_MINOR_SRC}.${${nm}_PATCH_SRC})
-set(MYPROXYCLIENT_GZ MyProxyClient-${MYPROXYCLIENT_VERSION}.tar.gz)
-set(MYPROXYCLIENT_SOURCE ${LLNL_URL}/${MYPROXYCLIENT_GZ})
-set(MYPROXYCLIENT_MD5 829a299157f91f8ff8a6e5bc8ec1c09c )
-
-add_cdat_package_dependent(MyProxyClient "" "" OFF "CDAT_BUILD_LEAN" ON)
diff --git a/CMake/cdat_modules/netcdf_deps.cmake b/CMake/cdat_modules/netcdf_deps.cmake
deleted file mode 100644
index c8da9fa7bf6ecf3858620a6a2a3ab9a6163c77f2..0000000000000000000000000000000000000000
--- a/CMake/cdat_modules/netcdf_deps.cmake
+++ /dev/null
@@ -1,4 +0,0 @@
-set(NetCDF_deps ${pkgconfig_pkg} ${hdf5_pkg} ${curl_pkg} ${zlib_pkg} ${jpeg_pkg} )
-if (CDAT_BUILD_PARALLEL)
-  list(APPEND NetCDF_deps ${mpi_pkg} )
-endif()
diff --git a/CMake/cdat_modules/netcdf_external.cmake b/CMake/cdat_modules/netcdf_external.cmake
deleted file mode 100644
index 3135cff493ae1fa9f258228909af70eef732af41..0000000000000000000000000000000000000000
--- a/CMake/cdat_modules/netcdf_external.cmake
+++ /dev/null
@@ -1,31 +0,0 @@
-set(netcdf_source "${CMAKE_CURRENT_BINARY_DIR}/build/netcdf")
-set(netcdf_install "${cdat_EXTERNALS}")
-set(netcdf_configure_args "--enable-netcdf-4")
-if (CDAT_BUILD_PARALLEL)
-  set(configure_file "cdatmpi_configure_step.cmake")
-else()
-  set(configure_file "cdat_configure_step.cmake")
-endif()
-
-configure_file(${cdat_CMAKE_SOURCE_DIR}/cdat_modules_extra/netcdf_patch_step.cmake.in
-  ${cdat_CMAKE_BINARY_DIR}/netcdf_patch_step.cmake
-  @ONLY)
-  
-set(netcdf_PATCH_COMMAND ${CMAKE_COMMAND} -P ${cdat_CMAKE_BINARY_DIR}/netcdf_patch_step.cmake)
-
-ExternalProject_Add(NetCDF
-  LIST_SEPARATOR ^^
-  DOWNLOAD_DIR ${CDAT_PACKAGE_CACHE_DIR}
-  SOURCE_DIR ${netcdf_source}
-  INSTALL_DIR ${netcdf_install}
-  URL ${NC4_URL}/${NC4_GZ}
-  URL_MD5 ${NC4_MD5}
-  BUILD_IN_SOURCE 1
-  PATCH_COMMAND ${netcdf_PATCH_COMMAND}
-  CONFIGURE_COMMAND ${CMAKE_COMMAND} -DINSTALL_DIR=<INSTALL_DIR> -DWORKING_DIR=<SOURCE_DIR> -DCONFIGURE_ARGS=${netcdf_configure_args} -P ${cdat_CMAKE_BINARY_DIR}/${configure_file}
-  BUILD_COMMAND ${CMAKE_COMMAND} -Dmake=$(MAKE) -DWORKING_DIR=<SOURCE_DIR> -P ${cdat_CMAKE_BINARY_DIR}/cdat_make_step.cmake
-  INSTALL_COMMAND ${CMAKE_COMMAND} -DWORKING_DIR=<SOURCE_DIR> -P ${cdat_CMAKE_BINARY_DIR}/cdat_install_step.cmake
-  DEPENDS ${NetCDF_deps}
-  ${ep_log_options}
-)
-
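Read together with netcdf_deps.cmake above, a single cache switch moves NetCDF between serial and MPI builds:

    # Net effect of CDAT_BUILD_PARALLEL on this module:
    #   ON  -> NetCDF_deps gains ${mpi_pkg} and the build configures
    #          through cdatmpi_configure_step.cmake
    #   OFF -> plain cdat_configure_step.cmake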
diff --git a/CMake/cdat_modules/netcdf_pkg.cmake b/CMake/cdat_modules/netcdf_pkg.cmake
deleted file mode 100644
index 9ea111ad8ab5f0b25acccddc162ffcff0625250c..0000000000000000000000000000000000000000
--- a/CMake/cdat_modules/netcdf_pkg.cmake
+++ /dev/null
@@ -1,15 +0,0 @@
-set(NC4_MAJOR_SRC 4)
-set(NC4_MINOR_SRC 3)
-set(NC4_PATCH_SRC 3.1)
-set(NC4_URL ${LLNL_URL})
-set(NC4_GZ netcdf-${NC4_MAJOR_SRC}.${NC4_MINOR_SRC}.${NC4_PATCH_SRC}.tar.gz)
-set(NC4_MD5 5c9dad3705a3408d27f696e5b31fb88c )
-
-set (nm NC4)
-string(TOUPPER ${nm} uc_nm)
-set(${uc_nm}_VERSION ${${nm}_MAJOR_SRC}.${${nm}_MINOR_SRC}.${${nm}_PATCH_SRC})
-set(NETCDF_VERSION ${NC4_VERSION})
-set(NETCDF_SOURCE ${NC4_URL}/${NC4_GZ})
-set(NETCDF_MD5 ${NC4_MD5})
-
-add_cdat_package(NetCDF "" "" ON)
diff --git a/CMake/cdat_modules/netcdfplus_deps.cmake b/CMake/cdat_modules/netcdfplus_deps.cmake
deleted file mode 100644
index 7efe4f6ce76c2125ef6dd0123af6e74bcec7c7f3..0000000000000000000000000000000000000000
--- a/CMake/cdat_modules/netcdfplus_deps.cmake
+++ /dev/null
@@ -1 +0,0 @@
-set(NetCDFPLUS_deps ${netcdf_pkg})
diff --git a/CMake/cdat_modules/netcdfplus_external.cmake b/CMake/cdat_modules/netcdfplus_external.cmake
deleted file mode 100644
index 130b822981d55a418a6b00b9b843b31bf1537b74..0000000000000000000000000000000000000000
--- a/CMake/cdat_modules/netcdfplus_external.cmake
+++ /dev/null
@@ -1,18 +0,0 @@
-set(netcdfplus_source "${CMAKE_CURRENT_BINARY_DIR}/build/netcdf-c++")
-set(netcdfplus_install "${cdat_EXTERNALS}")
-set(netcdfplus_configure_args "")
-
-ExternalProject_Add(NetCDFPLUS
-  LIST_SEPARATOR ^^
-  DOWNLOAD_DIR ${CDAT_PACKAGE_CACHE_DIR}
-  SOURCE_DIR ${netcdfplus_source}
-  INSTALL_DIR ${netcdfplus_install}
-  URL ${NC4PLUS_URL}/${NC4PLUS_GZ}
-  URL_MD5 ${NC4PLUS_MD5}
-  BUILD_IN_SOURCE 1
-  PATCH_COMMAND ""
-  CONFIGURE_COMMAND ${CMAKE_COMMAND} -DINSTALL_DIR=<INSTALL_DIR> -DWORKING_DIR=<SOURCE_DIR> -P ${cdat_CMAKE_BINARY_DIR}/cdat_configure_step.cmake
-  DEPENDS ${NetCDFPLUS_deps}
-  ${ep_log_options}
-)
-
diff --git a/CMake/cdat_modules/netcdfplus_pkg.cmake b/CMake/cdat_modules/netcdfplus_pkg.cmake
deleted file mode 100644
index cec5f82ecd8d6d1302e4812dbdc9057a9e828f30..0000000000000000000000000000000000000000
--- a/CMake/cdat_modules/netcdfplus_pkg.cmake
+++ /dev/null
@@ -1,15 +0,0 @@
-set(NC4PLUS_MAJOR_SRC 4)
-set(NC4PLUS_MINOR_SRC 2)
-set(NC4PLUS_PATCH_SRC 1.1)
-set(NC4PLUS_URL ${LLNL_URL})
-set(NC4PLUS_GZ netcdf-cxx-${NC4PLUS_MAJOR_SRC}.${NC4PLUS_MINOR_SRC}.tar.gz)
-set(NC4PLUS_MD5 0b09655cf977d768ced6c0d327dde176)
-
-set (nm NC4PLUS)
-string(TOUPPER ${nm} uc_nm)
-set(${uc_nm}_VERSION ${${nm}_MAJOR_SRC}.${${nm}_MINOR_SRC}.${${nm}_PATCH_SRC})
-set(NETCDFPLUS_VERSION ${NC4PLUS_VERSION})
-set(NETCDFPLUS_SOURCE ${NC4PLUS_URL}/${NC4PLUS_GZ})
-set(NETCDFPLUS_MD5 ${NC4PLUS_MD5})
-
-add_cdat_package_dependent(NetCDFPLUS "" "" OFF "CDAT_BUILD_GUI" OFF)
diff --git a/CMake/cdat_modules/numexpr_deps.cmake b/CMake/cdat_modules/numexpr_deps.cmake
deleted file mode 100644
index 5ba77a20d05e6c307256cd424d9db8037b0790cd..0000000000000000000000000000000000000000
--- a/CMake/cdat_modules/numexpr_deps.cmake
+++ /dev/null
@@ -1 +0,0 @@
-set(Numexpr_deps ${pkgconfig_pkg} ${numpy_pkg} ${myproxyclient_pkg} ${pip_pkg})
diff --git a/CMake/cdat_modules/numexpr_external.cmake b/CMake/cdat_modules/numexpr_external.cmake
deleted file mode 100644
index a87913eb39986921f0b5371e95bf4bb0bfc96a6e..0000000000000000000000000000000000000000
--- a/CMake/cdat_modules/numexpr_external.cmake
+++ /dev/null
@@ -1,6 +0,0 @@
-
-# create an external project to install Numexpr,
-# and configure and build it
-set(nm Numexpr)
-
-include(pipinstaller)
diff --git a/CMake/cdat_modules/numexpr_pkg.cmake b/CMake/cdat_modules/numexpr_pkg.cmake
deleted file mode 100644
index f8b18ab5cc7f00d8b2bc4fed3bf6f42179281183..0000000000000000000000000000000000000000
--- a/CMake/cdat_modules/numexpr_pkg.cmake
+++ /dev/null
@@ -1,12 +0,0 @@
-set(NUMEXPR_MAJOR 2)
-set(NUMEXPR_MINOR 2)
-set(NUMEXPR_PATCH 2)
-#set(NUMEXPR_VERSION ${NUMEXPR_MAJOR}.${NUMEXPR_MINOR})
-set(NUMEXPR_VERSION ${NUMEXPR_MAJOR}.${NUMEXPR_MINOR}.${NUMEXPR_PATCH})
-# The following is no longer needed when using easy_install
-set(NUMEXPR_URL ${LLNL_URL})
-set(NUMEXPR_GZ numexpr-${NUMEXPR_VERSION}.tar.gz)
-set(NUMEXPR_MD5 18103954044b3039c0a74a6006c8e0a7)
-set(NUMEXPR_SOURCE ${NUMEXPR_URL}/${NUMEXPR_GZ})
-
-add_cdat_package(Numexpr "" "" OFF)
diff --git a/CMake/cdat_modules/numpy_deps.cmake b/CMake/cdat_modules/numpy_deps.cmake
deleted file mode 100644
index 5511925968e8648046638da366df394033ef2c00..0000000000000000000000000000000000000000
--- a/CMake/cdat_modules/numpy_deps.cmake
+++ /dev/null
@@ -1 +0,0 @@
-set(NUMPY_deps ${pkgconfig_pkg} ${python_pkg} ${clapack_pkg} ${lapack_pkg})
diff --git a/CMake/cdat_modules/numpy_external.cmake b/CMake/cdat_modules/numpy_external.cmake
deleted file mode 100644
index 1e4b313494506b76eec515164e353cd64c47f018..0000000000000000000000000000000000000000
--- a/CMake/cdat_modules/numpy_external.cmake
+++ /dev/null
@@ -1,45 +0,0 @@
-# The Numpy external project 
-
-set(NUMPY_binary "${CMAKE_CURRENT_BINARY_DIR}/build/NUMPY")
-
-# To configure numpy we run a cmake -P script.
-# The script creates a site.cfg file and then runs
-# "python setup.py config" to verify the setup.
-configure_file(
-  ${cdat_CMAKE_SOURCE_DIR}/cdat_modules_extra/NUMPY_configure_step.cmake.in
-  ${cdat_CMAKE_BINARY_DIR}/NUMPY_configure_step.cmake @ONLY
-)
-
-# To build numpy we also run a cmake -P script.
-# The script sets LD_LIBRARY_PATH so that python
-# can run after it is built on Linux.
-configure_file(
-  ${cdat_CMAKE_SOURCE_DIR}/cdat_modules_extra/NUMPY_make_step.cmake.in
-  ${cdat_CMAKE_BINARY_DIR}/NUMPY_make_step.cmake @ONLY
-)
-
-configure_file(
-  ${cdat_CMAKE_SOURCE_DIR}/cdat_modules_extra/NUMPY_install_step.cmake.in
-  ${cdat_CMAKE_BINARY_DIR}/NUMPY_install_step.cmake @ONLY
-)
-
-set(NUMPY_CONFIGURE_COMMAND ${CMAKE_COMMAND}
-    -DCONFIG_TYPE=${CMAKE_CFG_INTDIR} -P ${cdat_CMAKE_BINARY_DIR}/NUMPY_configure_step.cmake)
-set(NUMPY_BUILD_COMMAND ${CMAKE_COMMAND} -P ${cdat_CMAKE_BINARY_DIR}/NUMPY_make_step.cmake)
-set(NUMPY_INSTALL_COMMAND ${CMAKE_COMMAND} -P ${cdat_CMAKE_BINARY_DIR}/NUMPY_install_step.cmake)
-
-# create an external project to download numpy,
-# and configure and build it
-ExternalProject_Add(NUMPY
-  URL ${NUMPY_URL}/${NUMPY_GZ}
-  URL_MD5 ${NUMPY_MD5}
-  DOWNLOAD_DIR ${CDAT_PACKAGE_CACHE_DIR}
-  SOURCE_DIR ${NUMPY_binary}
-  BINARY_DIR ${NUMPY_binary}
-  CONFIGURE_COMMAND ${NUMPY_CONFIGURE_COMMAND}
-  BUILD_COMMAND ${NUMPY_BUILD_COMMAND}
-  UPDATE_COMMAND ""
-  INSTALL_COMMAND ${NUMPY_INSTALL_COMMAND}
-  DEPENDS ${NUMPY_deps}
-  ${ep_log_options}
-)
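The step scripts configured above are not part of this diff; going by the comments, the configure step writes a site.cfg and verifies it with setup.py config. A plausible skeleton of NUMPY_configure_step.cmake.in under that reading (paths and section names are assumptions):

    # point numpy at the superbuild's BLAS/LAPACK via site.cfg
    file(WRITE "@NUMPY_binary@/site.cfg"
    "[DEFAULT]
    library_dirs = @cdat_EXTERNALS@/lib
    include_dirs = @cdat_EXTERNALS@/include
    ")
    # run setup.py config to confirm the configuration is usable
    execute_process(
      COMMAND "@PYTHON_EXECUTABLE@" setup.py config
      WORKING_DIRECTORY "@NUMPY_binary@"
      RESULT_VARIABLE res
    )
    if(NOT res EQUAL 0)
      message(FATAL_ERROR "numpy configure failed")
    endif()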
diff --git a/CMake/cdat_modules/numpy_pkg.cmake b/CMake/cdat_modules/numpy_pkg.cmake
deleted file mode 100644
index bd67f56332105dbaef5c98270b426c1e6301f920..0000000000000000000000000000000000000000
--- a/CMake/cdat_modules/numpy_pkg.cmake
+++ /dev/null
@@ -1,16 +0,0 @@
-set(NUMPY_MAJOR 1)
-set(NUMPY_MINOR 9)
-set(NUMPY_PATCH 0)
-set(NUMPY_MAJOR_SRC 1)
-set(NUMPY_MINOR_SRC 9)
-set(NUMPY_PATCH_SRC 0)
-set(NUMPY_URL ${LLNL_URL})
-set(NUMPY_GZ numpy-${NUMPY_MAJOR_SRC}.${NUMPY_MINOR_SRC}.${NUMPY_PATCH_SRC}.tar.gz)
-set(NUMPY_MD5 a93dfc447f3ef749b31447084839930b)
-set(NUMPY_SOURCE ${NUMPY_URL}/${NUMPY_GZ})
-
-set (nm NUMPY)
-string(TOUPPER ${nm} uc_nm)
-set(${uc_nm}_VERSION ${${nm}_MAJOR_SRC}.${${nm}_MINOR_SRC}.${${nm}_PATCH_SRC})
-add_cdat_package(NUMPY "" "" ON)
-set(NUMPY_deps ${pkgconfig_pkg} ${python_pkg} ${clapack_pkg} ${lapack_pkg})
diff --git a/CMake/cdat_modules/ocgis_deps.cmake b/CMake/cdat_modules/ocgis_deps.cmake
deleted file mode 100644
index 4968421a1ac9b8ac89c3b1dbcab3b1651d298b94..0000000000000000000000000000000000000000
--- a/CMake/cdat_modules/ocgis_deps.cmake
+++ /dev/null
@@ -1 +0,0 @@
-set(ocgis_deps ${shapely_pkg} ${gdal_pkg} ${fiona_pkg} ${pynetcdf4_pkg})
diff --git a/CMake/cdat_modules/ocgis_external.cmake b/CMake/cdat_modules/ocgis_external.cmake
deleted file mode 100644
index db51295ba84b8755a3cf00cf4b61e157a89af17b..0000000000000000000000000000000000000000
--- a/CMake/cdat_modules/ocgis_external.cmake
+++ /dev/null
@@ -1,19 +0,0 @@
-set(ocgis_source "${CMAKE_CURRENT_BINARY_DIR}/build/ocgis")
-set(ocgis_install "${cdat_EXTERNALS}")
-
-ExternalProject_Add(ocgis
-  DOWNLOAD_DIR ${CDAT_PACKAGE_CACHE_DIR}
-  SOURCE_DIR ${ocgis_source}
-  INSTALL_DIR ${ocgis_install}
-  BUILD_IN_SOURCE 1
-  ${GIT_CMD_STR_OCGIS}
-  ${GIT_TAG}
-  CONFIGURE_COMMAND ""
-  BUILD_COMMAND ${PYTHON_EXECUTABLE} setup.py build
-  INSTALL_COMMAND env "PYTHONPATH=$ENV{PYTHONPATH}" "${PYTHON_EXECUTABLE}" setup.py install "${PYTHON_EXTRA_PREFIX}"
-  DEPENDS ${ocgis_deps}
-  ${ep_log_options}
-)
-if (DEFINED GIT_CMD_STR_OCGIS)
-  unset(GIT_CMD_STR_OCGIS)
-endif()
diff --git a/CMake/cdat_modules/ocgis_pkg.cmake b/CMake/cdat_modules/ocgis_pkg.cmake
deleted file mode 100644
index ad6d852fff35b67602335bba10325482dea60a4e..0000000000000000000000000000000000000000
--- a/CMake/cdat_modules/ocgis_pkg.cmake
+++ /dev/null
@@ -1,12 +0,0 @@
-set(OCGIS_SOURCE ${OCGIS_URL}/${OCGIS_GZ})
-set(OCGIS_BRANCH next)
-set(OCGIS_REPOSITORY ${GIT_PROTOCOL}github.com/NCPP/ocgis.git )
-
-set(GIT_CMD_STR_OCGIS GIT_REPOSITORY ${OCGIS_REPOSITORY})
-set(GIT_TAG GIT_TAG "${OCGIS_BRANCH}")
-
-if (CDAT_BUILD_ALL)
-  add_cdat_package(ocgis "" "" ON)
-else()
-  add_cdat_package(ocgis "" "" OFF)
-endif()
diff --git a/CMake/cdat_modules/openssl_deps.cmake b/CMake/cdat_modules/openssl_deps.cmake
deleted file mode 100644
index 22b675b476d3f36175debe9b3172837a1adf97c1..0000000000000000000000000000000000000000
--- a/CMake/cdat_modules/openssl_deps.cmake
+++ /dev/null
@@ -1 +0,0 @@
-set(OPENSSL_deps )
diff --git a/CMake/cdat_modules/openssl_external.cmake b/CMake/cdat_modules/openssl_external.cmake
deleted file mode 100644
index 752d3395c77845d1b3bea623732a15339694a7b9..0000000000000000000000000000000000000000
--- a/CMake/cdat_modules/openssl_external.cmake
+++ /dev/null
@@ -1,37 +0,0 @@
-set (OPENSSL_SOURCE_DIR "${CMAKE_CURRENT_BINARY_DIR}/build/openssl")
-set (OPENSSL_INSTALL_DIR "${cdat_EXTERNALS}")
-
-execute_process (COMMAND uname -s COMMAND tr -d '\n'
-                 OUTPUT_VARIABLE HOST)
-STRING (TOLOWER ${HOST} HOST)
-execute_process (COMMAND uname -m COMMAND tr -d '\n'
-                 OUTPUT_VARIABLE ARCHITECTURE)
-
-get_filename_component (COMPILER "${CMAKE_C_COMPILER}" NAME_WE)
-
-if (APPLE)
-  if (ARCHITECTURE MATCHES "64$")
-    set (HOST "${HOST}64")
-  endif ()
-  set (COMPILER "cc")
-endif ()
-
-set (OPENSSL_CONF_ARGS "${HOST}-${ARCHITECTURE}-${COMPILER}")
-set (OPENSSL_CONF_ARGS
-    ${OPENSSL_CONF_ARGS}
-    "--prefix=${OPENSSL_INSTALL_DIR}")
-
-ExternalProject_Add (openssl
-  DOWNLOAD_DIR ${CDAT_PACKAGE_CACHE_DIR}
-  SOURCE_DIR ${OPENSSL_SOURCE_DIR}
-  INSTALL_DIR ${OPENSSL_INSTALL_DIR}
-  URL ${OPENSSL_SOURCE_URL}
-  URL_MD5 ${OPENSSL_MD5}
-  BUILD_IN_SOURCE 1
-  CONFIGURE_COMMAND ${OPENSSL_SOURCE_DIR}/Configure ${OPENSSL_CONF_ARGS}
-  DEPENDS ${OPENSSL_DEPS}
-  ${ep_log_options}
-  )
-
-set (OPENSSL_INCLUDE_DIR "${OPENSSL_INSTALL_DIR}/include")
-set (OPENSSL_LIBRARY_DIR "${OPENSSL_INSTALL_DIR}/lib")
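For concreteness, the target string assembled above on a 64-bit Mac:

    # uname -s | tr -d '\n'  -> "Darwin", lowered to "darwin", then "darwin64"
    # uname -m | tr -d '\n'  -> "x86_64"
    # COMPILER is forced to "cc" under APPLE
    # so the configure step runs:
    #   ./Configure darwin64-x86_64-cc --prefix=${cdat_EXTERNALS}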
diff --git a/CMake/cdat_modules/openssl_pkg.cmake b/CMake/cdat_modules/openssl_pkg.cmake
deleted file mode 100644
index 440d0f532ff716b39ec646d0c29c8ef298f6c4e2..0000000000000000000000000000000000000000
--- a/CMake/cdat_modules/openssl_pkg.cmake
+++ /dev/null
@@ -1,37 +0,0 @@
-option(CDAT_USE_SYSTEM_OPENSSL "Use system OpenSSL, if found." ON)
-mark_as_advanced(CDAT_USE_SYSTEM_OPENSSL)
-if(CDAT_USE_SYSTEM_OPENSSL)
-  find_package(OpenSSL QUIET)
-  if(OPENSSL_FOUND)
-    set(FILENAME_PATH_ARG "DIRECTORY")
-    if(CMAKE_VERSION VERSION_LESS 2.8.12)
-      # Support older version of GET_FILENAME_COMPONENT macro
-      # with legacy PATH argument
-      set(FILENAME_PATH_ARG "PATH")
-    endif(CMAKE_VERSION VERSION_LESS 2.8.12)
-    get_filename_component(OPENSSL_LIBRARY_DIR
-      "${OPENSSL_SSL_LIBRARY}" ${FILENAME_PATH_ARG})
-    message(STATUS "System OpenSSL found. "
-      "OpenSSL library directory: ${OPENSSL_LIBRARY_DIR}. "
-      "OpenSSL Version: ${OPENSSL_VERSION}")
-  endif(OPENSSL_FOUND)
-endif(CDAT_USE_SYSTEM_OPENSSL)
-
-if(NOT CDAT_USE_SYSTEM_OPENSSL OR NOT OPENSSL_FOUND)
-  set(OPENSSL_MAJOR_SRC 1)
-  set(OPENSSL_MINOR_SRC 0)
-  set(OPENSSL_PATCH_SRC 2e)
-  set(OPENSSL_VERSION
-    ${OPENSSL_MAJOR_SRC}.${OPENSSL_MINOR_SRC}.${OPENSSL_PATCH_SRC})
-  
-  message(STATUS "Compiling OpenSSL from source. Version: ${OPENSSL_VERSION}")
-
-  set(OPENSSL_URL ${LLNL_URL})
-  set(OPENSSL_GZ "openssl-${OPENSSL_VERSION}.tar.gz")
-  set(OPENSSL_MD5 5262bfa25b60ed9de9f28d5d52d77fc5)
-  set(OPENSSL_SOURCE_URL ${OPENSSL_URL}/${OPENSSL_GZ})
-  
-  # We've reached here because we need OpenSSL.
-  # Hence, defaulting to ON
-  add_cdat_package(openssl "" "" ON)
-endif(NOT CDAT_USE_SYSTEM_OPENSSL OR NOT OPENSSL_FOUND)
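Because CDAT_USE_SYSTEM_OPENSSL defaults to ON, a system OpenSSL silently wins whenever find_package locates one. The from-source path can be forced from the cache, e.g. in a -C preload script:

    # force the from-source OpenSSL even when a system copy exists
    set(CDAT_USE_SYSTEM_OPENSSL OFF CACHE BOOL "Use system OpenSSL, if found." FORCE)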
diff --git a/CMake/cdat_modules/osmesa_deps.cmake b/CMake/cdat_modules/osmesa_deps.cmake
deleted file mode 100644
index 2ee8b1857bdcc394529a25452ad8fb671fff0cbc..0000000000000000000000000000000000000000
--- a/CMake/cdat_modules/osmesa_deps.cmake
+++ /dev/null
@@ -1 +0,0 @@
-set(osmesa_deps ${pkgconfig_pkg})
diff --git a/CMake/cdat_modules/osmesa_external.cmake b/CMake/cdat_modules/osmesa_external.cmake
deleted file mode 100644
index 23f4870a1eac4464f53d860fbecb940091252e8c..0000000000000000000000000000000000000000
--- a/CMake/cdat_modules/osmesa_external.cmake
+++ /dev/null
@@ -1,26 +0,0 @@
-set(osmesa_source "${CMAKE_CURRENT_BINARY_DIR}/build/osmesa")
-set(osmesa_install "${cdat_EXTERNALS}")
-
-set(osmesa_conf_args "--with-driver=osmesa")
-set(osmesa_conf_args "${osmesa_conf_args}^^--disable-gallium")
-set(osmesa_conf_args "${osmesa_conf_args}^^--disable-gallium-intel")
-set(osmesa_conf_args "${osmesa_conf_args}^^--disable-egl")
-
-ExternalProject_Add(OSMesa
-  LIST_SEPARATOR ^^
-  DOWNLOAD_DIR ${CDAT_PACKAGE_CACHE_DIR}
-  SOURCE_DIR ${osmesa_source}
-  INSTALL_DIR ${osmesa_install}
-  URL ${OSMESA_URL}/${OSMESA_GZ}
-  URL_MD5 ${OSMESA_MD5}
-  BUILD_IN_SOURCE 1
-  PATCH_COMMAND ""
-  CONFIGURE_COMMAND
-    "${CMAKE_COMMAND}"
-      "-DCONFIGURE_ARGS=${osmesa_conf_args}"
-      "-DINSTALL_DIR=<INSTALL_DIR>"
-      "-DWORKING_DIR=<SOURCE_DIR>"
-      -P "${cdat_CMAKE_BINARY_DIR}/cdat_configure_step.cmake"
-  DEPENDS ${osmesa_deps}
-  ${ep_log_options}
-)
diff --git a/CMake/cdat_modules/osmesa_pkg.cmake b/CMake/cdat_modules/osmesa_pkg.cmake
deleted file mode 100644
index 1080dfb358a89d6b0c1bc3d9d93746b1d9b91ac3..0000000000000000000000000000000000000000
--- a/CMake/cdat_modules/osmesa_pkg.cmake
+++ /dev/null
@@ -1,15 +0,0 @@
-set(package OSMesa)
-string(TOUPPER ${package} package_uc)
-
-# We're using an older mesa (7.6.1) as it is known to work well in many
-# supercomputing environments.
-set(${package_uc}_MAJOR_SRC 7)
-set(${package_uc}_MINOR_SRC 6)
-set(${package_uc}_PATCH_SRC 1)
-set(${package_uc}_VERSION "${${package_uc}_MAJOR_SRC}.${${package_uc}_MINOR_SRC}.${${package_uc}_PATCH_SRC}")
-set(${package_uc}_URL ${LLNL_URL})
-set(${package_uc}_GZ "MesaLib-${${package_uc}_VERSION}.tar.gz")
-set(${package_uc}_MD5 e80fabad2e3eb7990adae773d6aeacba)
-set(${package_uc}_SOURCE "${${package_uc}_URL}/${${package_uc}_GZ}")
-
-add_cdat_package(${package} "7.6.1" "" OFF)
diff --git a/CMake/cdat_modules/paraview_deps.cmake b/CMake/cdat_modules/paraview_deps.cmake
deleted file mode 100644
index 6868b8da7d0253f679505b9f21b00a5ca123d210..0000000000000000000000000000000000000000
--- a/CMake/cdat_modules/paraview_deps.cmake
+++ /dev/null
@@ -1,17 +0,0 @@
-set(ParaView_deps ${pkgconfig_pkg} ${python_pkg} ${hdf5_pkg} ${png_pkg} ${jpeg_pkg} ${libxml2_pkg} ${freetype_pkg} ${netcdfplus_pkg} ${zlib_pkg})
-
-if (NOT CDAT_BUILD_GUI)
-  list(APPEND ParaView_deps ${qt_pkg})
-endif()
-
-if(CDAT_BUILD_PARALLEL)
-  list(APPEND ParaView_deps "${mpi_pkg}")
-endif()
-
-if(NOT CDAT_BUILD_LEAN)
-  list(APPEND ParaView_deps "${ffmpeg_pkg}")
-endif()
-
-if(CDAT_BUILD_OFFSCREEN)
-  list(APPEND ParaView_deps "${osmesa_pkg}")
-endif()
diff --git a/CMake/cdat_modules/paraview_external.cmake b/CMake/cdat_modules/paraview_external.cmake
deleted file mode 100644
index 5c20dbc2a3196d168ff512701b66f11748d0096d..0000000000000000000000000000000000000000
--- a/CMake/cdat_modules/paraview_external.cmake
+++ /dev/null
@@ -1,261 +0,0 @@
-set(ParaView_source "${CMAKE_CURRENT_BINARY_DIR}/build/ParaView")
-set(ParaView_binary "${CMAKE_CURRENT_BINARY_DIR}/build/ParaView-build")
-set(ParaView_install "${cdat_EXTERNALS}")
-
-if(QT_QMAKE_EXECUTABLE)
-  get_filename_component(QT_BINARY_DIR ${QT_QMAKE_EXECUTABLE} PATH)
-  get_filename_component(QT_ROOT ${QT_BINARY_DIR} PATH)
-endif()
-
-if(APPLE)
-  set(MACOSX_APP_INSTALL_PREFIX "${SB_EXTERNALS_DIR}") 
-endif()
-
-# Initialize
-set(ParaView_tpl_args)
-
-# VCS needs projections from GeoVis
-list(APPEND ParaView_tpl_args
-  -DModule_vtkViewsGeovis:BOOL=ON
-)
-list(APPEND ParaView_tpl_args
-  -DModule_vtklibproj4:BOOL=ON
-)
-
-# We would like to see CGM as well
-list(APPEND ParaView_tpl_args
-  -DModule_vtkIOCGM:BOOL=ON
-  )
-
-if(NOT CDAT_BUILD_LEAN)
-  list(APPEND ParaView_tpl_args -DPARAVIEW_ENABLE_FFMPEG:BOOL=ON)
-endif()
-
-if (CDAT_BUILD_PARALLEL)
-  list(APPEND ParaView_tpl_args
-    -DPARAVIEW_USE_MPI:BOOL=ON)
-  # Mac has issues with ParaView's MPI4PY, and it is unclear we need to build it at all.
-  # See this bug: paraview.org/bug/view.php?id=13587
-  list(APPEND ParaView_tpl_args -DENABLE_MPI4PY:BOOL=OFF)
-
-  if(CDAT_BUILD_MPI)
-    if(UNIX)
-      set(ENV{LD_LIBRARY_PATH} "${cdat_EXTERNALS}/lib:$ENV{LD_LIBRARY_PATH}")
-    elseif(APPLE)
-      set(ENV{DYLD_FALLBACK_LIBRARY_PATH} "${cdat_EXTERNALS}/lib:$ENV{DYLD_FALLBACK_LIBRARY_PATH}")
-    endif()
-    list(APPEND ParaView_tpl_args
-      -DMPIEXEC:FILEPATH=${cdat_EXTERNALS}/bin/mpiexec
-      -DMPI_CXX_COMPILER:FILEPATH=${cdat_EXTERNALS}/bin/mpicxx
-      -DMPI_C_COMPILER:FILEPATH=${cdat_EXTERNALS}/bin/mpicc
-      -DMPI_C_INCLUDE_PATH:PATH=${cdat_EXTERNALS}/include
-      -DMPI_CXX_INCLUDE_PATH:PATH=${cdat_EXTERNALS}/include
-      -DMACOSX_APP_INSTALL_PREFIX:PATH=${MACOSX_APP_INSTALL_PREFIX}
-      -DVTK_MPIRUN_EXE:FILEPATH=${cdat_EXTERNALS}/bin/mpiexec)
-  endif()
-endif()
-
-set(_vtk_modules "vtkRenderingImage;vtkRenderingVolume;vtkRenderingLabel;vtkRenderingFreeType;vtkRenderingFreeTypeOpenGL;vtkRenderingVolumeOpenGL;vtkRenderingCore;vtkRenderingOpenGL;vtkGeovisCore;vtkViewsCore;vtkViewsGeovis;vtkInteractionImage;vtkInteractionStyle;vtkInteractionWidgets;vtkCommonTransforms;vtkCommonCore;vtkCommonComputationalGeometry;vtkCommonExecutionModel;vtkCommonSystem;vtkCommonMisc;vtkFiltersFlowPaths;vtkFiltersStatistics;vtkFiltersAMR;vtkFiltersGeneric;vtkFiltersSources;vtkFiltersModeling;vtkFiltersExtraction;vtkFiltersSelection;vtkFiltersSMP;vtkFiltersCore;vtkFiltersHybrid;vtkFiltersTexture;vtkFiltersGeneral;vtkFiltersImaging;vtkFiltersGeometry;vtkIOImage;vtkIOCore;vtkIOExport;vtkIOImport;vtkIOGeometry;vtkImagingColor;vtkImagingSources;vtkImagingCore;vtkImagingGeneral;vtkImagingMath")
-
-if(NOT CDAT_BUILD_LEAN)
-  list(APPEND _vtk_modules "vtkIOFFMPEG")
-endif()
-# Either we use cdat zlib and libxml or system zlib and libxml
-list(APPEND ParaView_tpl_args
-  -DVTK_USE_SYSTEM_ZLIB:BOOL=ON
-  -DVTK_USE_SYSTEM_LIBXML2:BOOL=ON
-  -DVTK_USE_SYSTEM_HDF5:BOOL=ON
-  -DVTK_USE_SYSTEM_FREETYPE:BOOL=ON
-)
-
-# Turn off testing and other non-essential features
-list(APPEND ParaView_tpl_args
-  -DBUILD_TESTING:BOOL=OFF
-  -DPARAVIEW_BUILD_PLUGIN_MobileRemoteControl:BOOL=OFF
-  -DPQWIDGETS_DISABLE_QTWEBKIT:BOOL=ON
-  -DModule_vtkIOGeoJSON:BOOL=ON
-  -DCMAKE_PREFIX_PATH:PATH=${cdat_EXTERNALS}
-)
-
-# Use cdat zlib
-#if(NOT CDAT_USE_SYSTEM_ZLIB)
-#  list(APPEND ParaView_tpl_args
-#    -DZLIB_INCLUDE_DIR:PATH=${cdat_EXTERNALS}/include
-#    -DZLIB_LIBRARY:FILEPATH=${cdat_EXTERNALS}/lib/libz${_LINK_LIBRARY_SUFFIX}
-#  )
-#endif()
-
-# Use cdat libxml
-#if(NOT CDAT_USE_SYSTEM_LIBXML2)
-#  list(APPEND ParaView_tpl_args
-#    -DLIBXML2_INCLUDE_DIR:PATH=${cdat_EXTERNALS}/include/libxml2
-#    -DLIBXML2_LIBRARIES:FILEPATH=${cdat_EXTERNALS}/lib/libxml2${_LINK_LIBRARY_SUFFIX}
-#    -DLIBXML2_XMLLINT_EXECUTABLE:FILEPATH=${cdat_EXTERNALS}/bin/xmllint
-#  )
-#endif()
-
-# Use cdat hdf5
-if(NOT CDAT_USE_SYSTEM_HDF5)
-  list(APPEND ParaView_tpl_args
-    -DHDF5_DIR:PATH=${cdat_EXTERNALS}/
-    -DHDF5_C_INCLUDE_DIR:PATH=${cdat_EXTERNALS}/include
-    -DHDF5_INCLUDE_DIR:PATH=${cdat_EXTERNALS}/include
-    -DHDF5_LIBRARY:FILEPATH=${cdat_EXTERNALS}/lib/libhdf5${_LINK_LIBRARY_SUFFIX}
-    -DHDF5_hdf5_LIBRARY:FILEPATH=${cdat_EXTERNALS}/lib/libhdf5${_LINK_LIBRARY_SUFFIX}
-    -DHDF5_hdf5_LIBRARY_RELEASE:FILEPATH=${cdat_EXTERNALS}/lib/libhdf5${_LINK_LIBRARY_SUFFIX}
-  )
-
-#  if(NOT CDAT_USE_SYSTEM_ZLIB)
-#    list(APPEND ParaView_tpl_args
-#      -DHDF5_z_LIBRARY:FILEPATH=${cdat_EXTERNALS}/lib/libz${_LINK_LIBRARY_SUFFIX}
-#      -DHDF5_z_LIBRARY_RELEASE:FILEPATH=${cdat_EXTERNALS}/lib/libz${_LINK_LIBRARY_SUFFIX}
-#    )
-#  endif()
-endif()
-
-# Check if should build GUI
-if(CDAT_BUILD_GUI)
-  list(APPEND ParaView_tpl_args
-    -DPARAVIEW_BUILD_QT_GUI:BOOL=ON
-    -DVTK_QT_USE_WEBKIT:BOOL=OFF
-    -DQT_QMAKE_EXECUTABLE:FILEPATH=${QT_QMAKE_EXECUTABLE}
-    -DQT_QTUITOOLS_INCLUDE_DIR:PATH=${QT_ROOT}/include/QtUiTools
-    -DQT_BINARY_DIR:FILEPATH=${QT_BINARY_DIR})
-else()
-  list(APPEND ParaView_tpl_args
-    -DPARAVIEW_BUILD_QT_GUI:BOOL=OFF)
-endif()
-
-# Check if using R then only enable R support
-if (CDAT_BUILD_R OR CDAT_USE_SYSTEM_R)
-  list(APPEND ParaView_tpl_args
-    -DPARAVIEW_USE_GNU_R:BOOL=ON
-    -DR_COMMAND:PATH=${R_install}/bin/R
-    -DR_DIR:PATH=${R_install}/lib/R
-    -DR_INCLUDE_DIR:PATH=${R_install}/lib/R/include
-    -DR_LIBRARY_BASE:PATH=${R_install}/lib/R/lib/libR${_LINK_LIBRARY_SUFFIX}
-    -DR_LIBRARY_BLAS:PATH=${R_install}/lib/R/lib/libRblas${_LINK_LIBRARY_SUFFIX}
-    -DR_LIBRARY_LAPACK:PATH=${R_install}/lib/R/lib/libRlapack${_LINK_LIBRARY_SUFFIX}
-    -DR_LIBRARY_READLINE:PATH=)
-endif()
-
-if(UVCDAT_TESTDATA_LOCATION)
-  list(APPEND ParaView_tpl_args
-    -DUVCDAT_TestData:PATH=${UVCDAT_TESTDATA_LOCATION})
-endif()
-
-if(CDAT_BUILD_OFFSCREEN)
-  list(APPEND ParaView_tpl_args
-    "-DVTK_USE_X:BOOL=OFF"
-    "-DVTK_OPENGL_HAS_OSMESA:BOOL=ON"
-    "-DOPENGL_INCLUDE_DIR:PATH=${cdat_EXTERNALS}/include"
-    "-DOPENGL_gl_LIBRARY:FILEPATH=${cdat_EXTERNALS}/lib/libOSMesa${_LINK_LIBRARY_SUFFIX}"
-    "-DOPENGL_glu_LIBRARY:FILEPATH=${cdat_EXTERNALS}/lib/libGLU${_LINK_LIBRARY_SUFFIX}"
-    "-DOSMESA_INCLUDE_DIR:PATH=${cdat_EXTERNALS}/include"
-    "-DOSMESA_LIBRARY:FILEPATH=${cdat_EXTERNALS}/lib/libOSMesa${_LINK_LIBRARY_SUFFIX}"
-  )
-endif()
-
-include(GetGitRevisionDescription)
-set(paraview_branch ${PARAVIEW_BRANCH})
-
-get_git_head_revision(refspec sha)
-#if("${refspec}" STREQUAL "refs/heads/devel-master")
-#  set(paraview_branch uvcdat-next)
-#endif()
-
-string(REPLACE "//" "" GIT_PROTOCOL_PREFIX ${GIT_PROTOCOL})
-
-if (${GIT_PROTOCOL} STREQUAL "git://")
-  set(REPLACE_GIT_PROTOCOL_PREFIX "http:")
-else()
-  set(REPLACE_GIT_PROTOCOL_PREFIX "git:")
-endif()
-
-configure_file(${cdat_CMAKE_SOURCE_DIR}/cdat_modules_extra/paraview_download.sh.in
-  ${cdat_CMAKE_BINARY_DIR}/paraview_download.sh @ONLY
-  )
-
-if (NOT OFFLINE_BUILD)
-    set(DOWNLOAD_CMD_STR  DOWNLOAD_COMMAND ${cdat_CMAKE_BINARY_DIR}/paraview_download.sh)
-else ()
-    set(DOWNLOAD_CMD_STR)
-endif()
-
-set(_vtk_module_options)
-foreach(_module ${_vtk_modules})
-  list(APPEND _vtk_module_options "-DModule_${_module}:BOOL=ON")
-endforeach()
-ExternalProject_Add(ParaView
-  DOWNLOAD_DIR ${CDAT_PACKAGE_CACHE_DIR}
-  SOURCE_DIR ${ParaView_source}
-  BINARY_DIR ${ParaView_binary}
-  INSTALL_DIR ${ParaView_install}
-  ${DOWNLOAD_CMD_STR}
-  GIT_TAG ${paraview_branch}
-  UPDATE_COMMAND ""
-  PATCH_COMMAND ""
-  CMAKE_CACHE_ARGS
-    -DBUILD_SHARED_LIBS:BOOL=ON
-    -DBUILD_TESTING:BOOL=${BUILD_TESTING}
-    -DCMAKE_BUILD_TYPE:STRING=${CMAKE_CFG_INTDIR}
-    -DCMAKE_CXX_FLAGS:STRING=${cdat_tpl_cxx_flags}
-    -DCMAKE_C_FLAGS:STRING=${cdat_tpl_c_flags}
-#    -DPARAVIEW_BUILD_AS_APPLICATION_BUNDLE:BOOL=OFF
-#    -DPARAVIEW_DISABLE_VTK_TESTING:BOOL=ON
-#    -DPARAVIEW_INSTALL_THIRD_PARTY_LIBRARIES:BOOL=OFF
- #   -DPARAVIEW_TESTING_WITH_PYTHON:BOOL=OFF
-    -DINCLUDE_PYTHONHOME_PATHS:BOOL=OFF
-    ${cdat_compiler_args}
-    ${ParaView_tpl_args}
-    # Python
-    -DPARAVIEW_ENABLE_PYTHON:BOOL=ON
-    -DPYTHON_EXECUTABLE:FILEPATH=${PYTHON_EXECUTABLE}
-    -DPYTHON_INCLUDE_DIR:PATH=${PYTHON_INCLUDE}
-    -DPYTHON_LIBRARY:FILEPATH=${PYTHON_LIBRARY}
-    -DCMAKE_INSTALL_RPATH_USE_LINK_PATH:BOOL=ON
-    -DVTK_LEGACY_SILENT:BOOL=ON
-    ${_vtk_module_options}
-    -DPARAVIEW_DO_UNIX_STYLE_INSTALLS:BOOL=ON
-  CMAKE_ARGS
-    -DCMAKE_INSTALL_PREFIX:PATH=<INSTALL_DIR>
-  DEPENDS ${ParaView_deps}
-  ${ep_log_options}
-)
-
-# Install ParaView and VTK python modules via their setup.py files.
-
-#configure_file(${cdat_CMAKE_SOURCE_DIR}/cdat_modules_extra/vtk_install_python_module.cmake.in
-#  ${cdat_CMAKE_BINARY_DIR}/vtk_install_python_module.cmake
-#  @ONLY)
-
-#configure_file(${cdat_CMAKE_SOURCE_DIR}/cdat_modules_extra/paraview_install_python_module.cmake.in
-#  ${cdat_CMAKE_BINARY_DIR}/paraview_install_python_module.cmake
-#  @ONLY)
-
-#ExternalProject_Add_Step(ParaView InstallParaViewPythonModule
-#  COMMAND ${CMAKE_COMMAND} -P ${cdat_CMAKE_BINARY_DIR}/paraview_install_python_module.cmake
-#  DEPENDEES install
-#  WORKING_DIRECTORY ${cdat_CMAKE_BINARY_DIR}
-#  )
-
-#ExternalProject_Add_Step(ParaView InstallVTKPythonModule
-#  COMMAND ${CMAKE_COMMAND} -P ${cdat_CMAKE_BINARY_DIR}/vtk_install_python_module.cmake
-#  DEPENDEES install
-#  WORKING_DIRECTORY ${cdat_CMAKE_BINARY_DIR}
-#  )
-
-# Symlinks of Externals/bin get placed in prefix/bin, so we need to symlink the
-# ParaView libs into prefix/lib as well for pvserver to work.
-if(NOT EXISTS ${CMAKE_INSTALL_PREFIX}/lib)
-  message("making ${ParaView_install}/lib")
-  file(MAKE_DIRECTORY ${CMAKE_INSTALL_PREFIX}/lib)
-endif()
-
-#ExternalProject_Add_Step(ParaView InstallParaViewLibSymlink
-#  COMMAND ${CMAKE_COMMAND} -E create_symlink ${ParaView_install}/lib/paraview-${PARAVIEW_MAJOR}.${PARAVIEW_MINOR} ${CMAKE_INSTALL_PREFIX}/lib/paraview-${PARAVIEW_MAJOR}.${PARAVIEW_MINOR}
-#  DEPENDEES install
-#  WORKING_DIRECTORY ${cdat_CMAKE_BINARY_DIR}
-#)
-unset(GIT_CMD_STR)
-
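The _vtk_module_options loop is the only consumer of the long _vtk_modules string; its expansion is mechanical, as a self-contained reduction shows:

    set(mods vtkRenderingImage vtkGeovisCore)   # stand-ins for the full list
    set(opts)
    foreach(_m ${mods})
      list(APPEND opts "-DModule_${_m}:BOOL=ON")
    endforeach()
    # opts == "-DModule_vtkRenderingImage:BOOL=ON;-DModule_vtkGeovisCore:BOOL=ON"
    # and that list is spliced into CMAKE_CACHE_ARGS above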
diff --git a/CMake/cdat_modules/paraview_pkg.cmake b/CMake/cdat_modules/paraview_pkg.cmake
deleted file mode 100644
index c5fe1743bf1f6be8e25862dacbd5554df3ef8ca9..0000000000000000000000000000000000000000
--- a/CMake/cdat_modules/paraview_pkg.cmake
+++ /dev/null
@@ -1,11 +0,0 @@
-set(PARAVIEW_MAJOR 4)
-set(PARAVIEW_MINOR 1)
-set(PARAVIEW_PATCH 0)
-set(PARAVIEW_VERSION ${PARAVIEW_MAJOR}.${PARAVIEW_MINOR}.${PARAVIEW_PATCH})
-set(PARAVIEW_URL ${LLNL_URL})
-set(PARAVIEW_GZ ParaView-${PARAVIEW_VERSION}c.tar.gz)
-set(PARAVIEW_MD5)
-set(PARAVIEW_BRANCH uvcdat-master)
-set(PARAVIEW_SOURCE ${GIT_PROTOCOL}github.com/UV-CDAT/ParaView.git )
-
-add_cdat_package_dependent(ParaView "" "" ON "CDAT_BUILD_GRAPHICS" OFF)
diff --git a/CMake/cdat_modules/pbmplus_external.cmake b/CMake/cdat_modules/pbmplus_external.cmake
deleted file mode 100644
index 03743c74ced3a39310db467a00bd0a7567daf681..0000000000000000000000000000000000000000
--- a/CMake/cdat_modules/pbmplus_external.cmake
+++ /dev/null
@@ -1,32 +0,0 @@
-
-set(pbmplus_source "${CMAKE_CURRENT_BINARY_DIR}/build/pbmplus")
-set(pbmplus_install "${cdat_EXTERNALS}")
-
-#cp ../../exsrc/src/pbmplus/pbmplus.h . ; cp ../../exsrc/src/pbmplus/libpbm1.c pbm  ;cp ../../exsrc/src/pbmplus/Makefile .
-
-configure_file(${cdat_CMAKE_SOURCE_DIR}/cdat_modules_extra/pbmplus_configure_step.cmake.in
-    ${CMAKE_CURRENT_BINARY_DIR}/pbmplus_configure_step.cmake
-    @ONLY)
-
-ExternalProject_Add(pbmplus
-  DOWNLOAD_DIR ${CDAT_PACKAGE_CACHE_DIR}
-  SOURCE_DIR ${pbmplus_source}
-  INSTALL_DIR ${pbmplus_install}
-  URL ${PBMPLUS_URL}/${PBMPLUS_GZ}
-  URL_MD5 ${PBMPLUS_MD5}
-  BUILD_IN_SOURCE 1
-  PATCH_COMMAND ${CMAKE_COMMAND} -E copy_if_different ${cdat_external_patch_dir}/src/pbmplus/libpbm1.c ${pbmplus_source}/pbm/
-  BUILD_COMMAND ${CMAKE_COMMAND} -Dmake=$(MAKE) -DWORKING_DIR=<SOURCE_DIR> -P ${cdat_CMAKE_BINARY_DIR}/cdat_make_step.cmake
-  CONFIGURE_COMMAND ${CMAKE_COMMAND} -P ${CMAKE_CURRENT_BINARY_DIR}/pbmplus_configure_step.cmake
-  DEPENDS ${pbmplus_deps}
-  ${ep_log_options}
-)
-
-ExternalProject_Add_Step(pbmplus CopyPbmplusHeader
-  COMMAND ${CMAKE_COMMAND} -E copy_if_different ${cdat_external_patch_dir}/src/pbmplus/pbmplus.h ${pbmplus_source}/
-  DEPENDEES patch
-  DEPENDERS configure
-  )
-
-# pbmplus install fails if this directory doesn't already exist.
-file(MAKE_DIRECTORY ${pbmplus_install}/man/mann)
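The DEPENDEES/DEPENDERS pair on CopyPbmplusHeader pins the custom step between two built-in ones, giving the effective order:

    # download -> patch -> CopyPbmplusHeader -> configure -> build -> install
    # DEPENDEES patch     : run only after the patch step has finished
    # DEPENDERS configure : force configure to wait for this step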
diff --git a/CMake/cdat_modules/pep8_deps.cmake b/CMake/cdat_modules/pep8_deps.cmake
deleted file mode 100644
index e57f7cf7e3045ee0890edc3e08f2a8e7af2a90fa..0000000000000000000000000000000000000000
--- a/CMake/cdat_modules/pep8_deps.cmake
+++ /dev/null
@@ -1 +0,0 @@
-set(pep8_deps ${python_pkg} ${setuptools_pkg})
diff --git a/CMake/cdat_modules/pep8_external.cmake b/CMake/cdat_modules/pep8_external.cmake
deleted file mode 100644
index c6dc541c76414f3273cff07deb3cb171ee7aeb0f..0000000000000000000000000000000000000000
--- a/CMake/cdat_modules/pep8_external.cmake
+++ /dev/null
@@ -1,16 +0,0 @@
-# The pep8 project
-
-set(pep8_binary "${CMAKE_CURRENT_BINARY_DIR}/build/pep8")
-
-ExternalProject_Add(pep8
-  DOWNLOAD_DIR ${CMAKE_CURRENT_BINARY_DIR}
-  SOURCE_DIR ${pep8_binary}
-  URL ${PEP8_SOURCE}
-  URL_MD5 ${PEP8_MD5}
-  BUILD_IN_SOURCE 1
-  CONFIGURE_COMMAND ""
-  BUILD_COMMAND ${PYTHON_EXECUTABLE} setup.py build
-  INSTALL_COMMAND ${PYTHON_EXECUTABLE} setup.py install ${PYTHON_EXTRA_PREFIX}
-  DEPENDS ${pep8_deps}
-  ${ep_log_options}
-  )
diff --git a/CMake/cdat_modules/pep8_pkg.cmake b/CMake/cdat_modules/pep8_pkg.cmake
deleted file mode 100644
index 723e4b8d3e9d1865a25ba186b9bcfee1a05e5a20..0000000000000000000000000000000000000000
--- a/CMake/cdat_modules/pep8_pkg.cmake
+++ /dev/null
@@ -1,16 +0,0 @@
-set( PEP8_MAJOR 1 )
-set( PEP8_MINOR 5 )
-set( PEP8_PATCH 7)
-set( PEP8_VERSION ${PEP8_MAJOR}.${PEP8_MINOR}.${PEP8_PATCH} )
-set( PEP8_URL ${LLNL_URL} )
-set( PEP8_GZ pep8-${PEP8_VERSION}.tar.gz )
-set( PEP8_MD5 f6adbdd69365ecca20513c709f9b7c93 )
-
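-# recurring cdat boilerplate: derive <PKG>_VERSION from the per-package
-# MAJOR/MINOR/PATCH variables through the nm/uc_nm indirection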
-set (nm PEP8)
-string(TOUPPER ${nm} uc_nm)
-set(${uc_nm}_VERSION ${${nm}_MAJOR}.${${nm}_MINOR}.${${nm}_PATCH})
-set(PEP8_SOURCE ${PEP8_URL}/${PEP8_GZ})
-
-if (BUILD_TESTING)
-  add_cdat_package(pep8 "" "" ON)
-endif()
diff --git a/CMake/cdat_modules/pip_deps.cmake b/CMake/cdat_modules/pip_deps.cmake
deleted file mode 100644
index 35c1383e546b3c5ce874cdca2b765b84ff5ff29b..0000000000000000000000000000000000000000
--- a/CMake/cdat_modules/pip_deps.cmake
+++ /dev/null
@@ -1 +0,0 @@
-set(pip_deps  ${setuptools_pkg})
diff --git a/CMake/cdat_modules/pip_external.cmake b/CMake/cdat_modules/pip_external.cmake
deleted file mode 100644
index 4c21cd6d32d6e3c0889a93b8ee3054e1a67d9748..0000000000000000000000000000000000000000
--- a/CMake/cdat_modules/pip_external.cmake
+++ /dev/null
@@ -1,21 +0,0 @@
-# create an external project to install pip,
-# and configure and build it
-set(nm pip)
-
-include(${cdat_CMAKE_BINARY_DIR}/cdat_common_environment.cmake)
-string(TOUPPER ${nm} uc_nm)
-
-ExternalProject_Add(${nm}
-    DOWNLOAD_DIR ${CDAT_PACKAGE_CACHE_DIR}
-    URL ${${uc_nm}_SOURCE}
-    URL_MD5 ${${uc_nm}_MD5} 
-    BUILD_IN_SOURCE 1
-    CONFIGURE_COMMAND ""
-    BUILD_COMMAND ""
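-    # no separate build step: easy_install bootstraps pip straight from the
-    # tarball already fetched into the package cache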
-    INSTALL_COMMAND ${EASY_INSTALL_BINARY} ${CDAT_PACKAGE_CACHE_DIR}/${${uc_nm}_GZ}
-    DEPENDS ${${nm}_deps}
-    ${ep_log_options}
-    )
diff --git a/CMake/cdat_modules/pip_pkg.cmake b/CMake/cdat_modules/pip_pkg.cmake
deleted file mode 100644
index 7e442f2f26f530b4074d1341fe89324d9b8104aa..0000000000000000000000000000000000000000
--- a/CMake/cdat_modules/pip_pkg.cmake
+++ /dev/null
@@ -1,13 +0,0 @@
-set(PIP_MAJOR_SRC 7)
-set(PIP_MINOR_SRC 1)
-set(PIP_PATCH_SRC 2)
-
-set (nm PIP)
-string(TOUPPER ${nm} uc_nm)
-set(${uc_nm}_VERSION ${${nm}_MAJOR_SRC}.${${nm}_MINOR_SRC}.${${nm}_PATCH_SRC})
-set(PIP_URL ${LLNL_URL})
-set(PIP_GZ pip-${PIP_VERSION}.tar.gz)
-set(PIP_SOURCE ${PIP_URL}/${PIP_GZ})
-set(PIP_MD5 3823d2343d9f3aaab21cf9c917710196)
-
-add_cdat_package(pip "" "" OFF)
diff --git a/CMake/cdat_modules/pixman_deps.cmake b/CMake/cdat_modules/pixman_deps.cmake
deleted file mode 100644
index 276a88585c7eb5e413bbe24f1107288788bcd1ca..0000000000000000000000000000000000000000
--- a/CMake/cdat_modules/pixman_deps.cmake
+++ /dev/null
@@ -1 +0,0 @@
-set(pixman_deps ${pkgconfig_pkg} ${zlib_pkg} ${freetype_pkg})
diff --git a/CMake/cdat_modules/pixman_external.cmake b/CMake/cdat_modules/pixman_external.cmake
deleted file mode 100644
index bd043c7a007a47ef7bdf60c86385cb75c0ea8814..0000000000000000000000000000000000000000
--- a/CMake/cdat_modules/pixman_external.cmake
+++ /dev/null
@@ -1,21 +0,0 @@
-
-set(pixman_source "${CMAKE_CURRENT_BINARY_DIR}/build/pixman")
-set(pixman_install "${cdat_EXTERNALS}")
-set(pixman_configure_args "--disable-gtk")
-
-ExternalProject_Add(pixman
-  LIST_SEPARATOR ^^
-  DOWNLOAD_DIR ${CDAT_PACKAGE_CACHE_DIR}
-  SOURCE_DIR ${pixman_source}
-  INSTALL_DIR ${pixman_install}
-  URL ${PIX_URL}/${PIX_GZ}
-  URL_MD5 ${PIX_MD5}
-  BUILD_IN_SOURCE 1
-  PATCH_COMMAND ""
-  CONFIGURE_COMMAND ${CMAKE_COMMAND} -DCONFIGURE_ARGS=${pixman_configure_args} -DINSTALL_DIR=${pixman_install} -DWORKING_DIR=${pixman_source} -P ${cdat_CMAKE_BINARY_DIR}/cdat_configure_step.cmake
-  DEPENDS ${pixman_deps}
-  ${ep_log_options}
-)
-
-set(pixman_DIR "${pixman_binary}" CACHE PATH "pixman binary directory" FORCE)
-mark_as_advanced(pixman_DIR)
diff --git a/CMake/cdat_modules/pixman_pkg.cmake b/CMake/cdat_modules/pixman_pkg.cmake
deleted file mode 100644
index 10590199f846b50974697e673a8eeae88080bb9e..0000000000000000000000000000000000000000
--- a/CMake/cdat_modules/pixman_pkg.cmake
+++ /dev/null
@@ -1,19 +0,0 @@
-set(PIX_MAJOR 0)
-set(PIX_MINOR 22)
-set(PIX_PATCH 2)
-set(PIX_MAJOR_SRC 0)
-set(PIX_MINOR_SRC 30)
-set(PIX_PATCH_SRC 0)
-set(PIX_URL ${LLNL_URL})
-set(PIX_GZ pixman-${PIX_MAJOR_SRC}.${PIX_MINOR_SRC}.${PIX_PATCH_SRC}.tar.gz)
-set(PIX_MD5 ae7ac97921dfa59086ca2231621a79c7 )
-
-
-set (nm PIX)
-string(TOUPPER ${nm} uc_nm)
-set(${uc_nm}_VERSION ${${nm}_MAJOR_SRC}.${${nm}_MINOR_SRC}.${${nm}_PATCH_SRC})
-set(PIXMAN_VERSION ${PIX_VERSION})
-set(PIXMAN_SOURCE ${PIX_URL}/${PIX_GZ})
-set(PIXMAN_MD5 ${PIX_MD5})
-
-add_cdat_package_dependent(pixman "" "" OFF "CDAT_BUILD_GRAPHICS" OFF)
diff --git a/CMake/cdat_modules/pkgconfig_deps.cmake b/CMake/cdat_modules/pkgconfig_deps.cmake
deleted file mode 100644
index 106cfb0743978847b23b1e40b2ab8252e8e495b3..0000000000000000000000000000000000000000
--- a/CMake/cdat_modules/pkgconfig_deps.cmake
+++ /dev/null
@@ -1 +0,0 @@
-set(pkgconfig_deps ${wget_pkg})
diff --git a/CMake/cdat_modules/pkgconfig_external.cmake b/CMake/cdat_modules/pkgconfig_external.cmake
deleted file mode 100644
index 2b8bd158bec521bd6e7433e1d734280de52f4751..0000000000000000000000000000000000000000
--- a/CMake/cdat_modules/pkgconfig_external.cmake
+++ /dev/null
@@ -1,18 +0,0 @@
-
-set(pkgconfig_source "${CMAKE_CURRENT_BINARY_DIR}/build/pkgconfig")
-set(pkgconfig_install "${cdat_EXTERNALS}")
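-# --with-internal-glib builds pkg-config against its bundled copy of glib,
-# sidestepping the circular dependency of glib itself requiring pkg-config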
-set(pkgconfig_config_args "--with-internal-glib")
-
-ExternalProject_Add(pkgconfig
-  DOWNLOAD_DIR ${CDAT_PACKAGE_CACHE_DIR}
-  URL ${PKG_URL}/${PKG_GZ}
-  URL_MD5 ${PKG_MD5}
-  BUILD_IN_SOURCE 1
-  PATCH_COMMAND ""
-  SOURCE_DIR ${pkgconfig_source}
-  INSTALL_DIR ${pkgconfig_install}
-  CONFIGURE_COMMAND ${CMAKE_COMMAND} -DCONFIGURE_ARGS=${pkgconfig_config_args} -DINSTALL_DIR=<INSTALL_DIR> -DWORKING_DIR=<SOURCE_DIR> -P ${cdat_CMAKE_BINARY_DIR}/cdat_configure_step.cmake
-  DEPENDS ${pkgconfig_deps}
-  ${ep_log_options}
-)
-
diff --git a/CMake/cdat_modules/pkgconfig_pkg.cmake b/CMake/cdat_modules/pkgconfig_pkg.cmake
deleted file mode 100644
index ca3927731872a2946b67784590c16cee49fb3539..0000000000000000000000000000000000000000
--- a/CMake/cdat_modules/pkgconfig_pkg.cmake
+++ /dev/null
@@ -1,22 +0,0 @@
-set(PKG_MAJOR 0)
-set(PKG_MINOR 9)
-set(PKG_PATCH 0)
-set(PKG_MAJOR_SRC 0)
-set(PKG_MINOR_SRC 28)
-set(PKG_PATCH_SRC 0)
-set(PKG_VERSION ${PKG_MAJOR_SRC}.${PKG_MINOR_SRC}.${PKG_PATCH_SRC})
-set(PKG_URL ${LLNL_URL})
-set(PKG_GZ pkg-config-${PKG_MAJOR_SRC}.${PKG_MINOR_SRC}.tar.gz)
-set(PKG_MD5 aa3c86e67551adc3ac865160e34a2a0d)
-set(PKGCONFIG_VERSION ${PKG_VERSION})
-set(PKGCONFIG_SOURCE ${PKG_URL}/${PKG_GZ})
-
-add_cdat_package(pkgconfig "" "" OFF)
-
-if(NOT CDAT_USE_SYSTEM_PKGCONFIG)
-  set(cdat_PKG_CONFIG_EXECUTABLE ${cdat_EXTERNALS}/bin/pkg-config)
-  set(ENV{PKG_CONFIG} "${cdat_PKG_CONFIG_EXECUTABLE}")
-  set(ENV{PKG_CONFIG_PATH} "${cdat_EXTERNALS}/lib/pkgconfig:$ENV{PKG_CONFIG_PATH}")
-endif()
-
diff --git a/CMake/cdat_modules/pmw_deps.cmake b/CMake/cdat_modules/pmw_deps.cmake
deleted file mode 100644
index 8e1435b25072b0febfecca1e107cfc28d50b89a9..0000000000000000000000000000000000000000
--- a/CMake/cdat_modules/pmw_deps.cmake
+++ /dev/null
@@ -1 +0,0 @@
-set(Pmw_deps ${pkgconfig_pkg} ${python_pkg})
diff --git a/CMake/cdat_modules/pmw_external.cmake b/CMake/cdat_modules/pmw_external.cmake
deleted file mode 100644
index 202ed810e51a09f8116315f0853898c885ff5fd6..0000000000000000000000000000000000000000
--- a/CMake/cdat_modules/pmw_external.cmake
+++ /dev/null
@@ -1,30 +0,0 @@
-
-set(Pmw_source "${CMAKE_CURRENT_BINARY_DIR}/build/Pmw")
-set(Pmw_install "${cdat_EXTERNALS}")
-
-configure_file(${cdat_CMAKE_SOURCE_DIR}/cdat_modules_extra/pmw_make_step.cmake.in
-  ${cdat_CMAKE_BINARY_DIR}/pmw_make_step.cmake
-  @ONLY)
-  
-configure_file(${cdat_CMAKE_SOURCE_DIR}/cdat_modules_extra/pmw_install_step.cmake.in
-  ${cdat_CMAKE_BINARY_DIR}/pmw_install_step.cmake
-  @ONLY)
-
-set(Pmw_build_command ${CMAKE_COMMAND} -P ${cdat_CMAKE_BINARY_DIR}/pmw_make_step.cmake)
-set(Pmw_install_command ${CMAKE_COMMAND} -P ${cdat_CMAKE_BINARY_DIR}/pmw_install_step.cmake)
-
-ExternalProject_Add(Pmw
-  DOWNLOAD_DIR ${CDAT_PACKAGE_CACHE_DIR}
-  SOURCE_DIR ${Pmw_source}
-  INSTALL_DIR ${Pmw_install}
-  URL ${PMW_URL}/${PMW_GZ}
-  URL_MD5 ${PMW_MD5}
-  BUILD_IN_SOURCE 1
-  PATCH_COMMAND ""
-  CONFIGURE_COMMAND ""
-  BUILD_COMMAND ${Pmw_build_command}
-  INSTALL_COMMAND ${Pmw_install_command}
-  DEPENDS ${Pmw_deps}
-  ${ep_log_options}
-)
-
diff --git a/CMake/cdat_modules/pmw_pkg.cmake b/CMake/cdat_modules/pmw_pkg.cmake
deleted file mode 100644
index f0a0031b586b28b76c94974e10199895359f67e8..0000000000000000000000000000000000000000
--- a/CMake/cdat_modules/pmw_pkg.cmake
+++ /dev/null
@@ -1,19 +0,0 @@
-set(PMW_MAJOR 1)
-set(PMW_MINOR 3)
-set(PMW_MAJOR_SRC 1)
-set(PMW_MINOR_SRC 3)
-set(PMW_PATCH_SRC 2)
-set(PMW_URL ${LLNL_URL})
-set(PMW_GZ Pmw.${PMW_MAJOR_SRC}.${PMW_MINOR_SRC}.${PMW_PATCH_SRC}.tar.gz)
-set(PMW_MD5 7f30886fe9885ab3cf85dac6ce1fbda5)
-set(PMW_SOURCE ${PMW_URL}/${PMW_GZ})
-
-
-set (nm PMW)
-string(TOUPPER ${nm} uc_nm)
-set(${uc_nm}_VERSION ${${nm}_MAJOR_SRC}.${${nm}_MINOR_SRC}.${${nm}_PATCH_SRC})
-if (CDAT_BUILD_LEAN)
-    add_cdat_package_dependent(Pmw "" "" OFF "CDAT_BUILD_GUI" OFF)
-else()
-    add_cdat_package(Pmw "" "" OFF)
-endif()
diff --git a/CMake/cdat_modules/pnetcdf_deps.cmake b/CMake/cdat_modules/pnetcdf_deps.cmake
deleted file mode 100644
index 9b1966cce1e4375016829e5fc695e8eebc381bb8..0000000000000000000000000000000000000000
--- a/CMake/cdat_modules/pnetcdf_deps.cmake
+++ /dev/null
@@ -1 +0,0 @@
-set(PNETCDF_deps ${pkgconfig_pkg} ${mpi_pkg})
diff --git a/CMake/cdat_modules/pnetcdf_external.cmake b/CMake/cdat_modules/pnetcdf_external.cmake
deleted file mode 100644
index 431348b850625c333abfe91c760c159040d1f7ca..0000000000000000000000000000000000000000
--- a/CMake/cdat_modules/pnetcdf_external.cmake
+++ /dev/null
@@ -1,17 +0,0 @@
-set(pnetcdf_source "${CMAKE_CURRENT_BINARY_DIR}/build/pnetcdf")
-set(pnetcdf_install "${cdat_EXTERNALS}")
-set(pnetcdf_configure_args "--with-mpi=${cdat_EXTERNALS}")
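-# -fPIC so the static pnetcdf library can later be linked into shared objects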
-set(pnetcdf_additional_cflags "-fPIC")
-
-ExternalProject_Add(PNETCDF
-  LIST_SEPARATOR ^^
-  DOWNLOAD_DIR ${CDAT_PACKAGE_CACHE_DIR}
-  SOURCE_DIR ${pnetcdf_source}
-  INSTALL_DIR ${pnetcdf_install}
-  URL ${PNETCDF_URL}/${PNETCDF_GZ}
-  URL_MD5 ${PNETCDF_MD5}
-  BUILD_IN_SOURCE 1
-  CONFIGURE_COMMAND ${CMAKE_COMMAND} -DADDITIONAL_CFLAGS=${pnetcdf_additional_cflags} -DINSTALL_DIR=<INSTALL_DIR> -DWORKING_DIR=<SOURCE_DIR> -D CONFIGURE_ARGS=${pnetcdf_configure_args} -P ${cdat_CMAKE_BINARY_DIR}/cdat_configure_step.cmake
-  DEPENDS ${PNETCDF_deps}
-  ${ep_log_options}
-)
diff --git a/CMake/cdat_modules/pnetcdf_pkg.cmake b/CMake/cdat_modules/pnetcdf_pkg.cmake
deleted file mode 100644
index 02cf48e1928c9269d0760ddb26b62e8fe8f5ac22..0000000000000000000000000000000000000000
--- a/CMake/cdat_modules/pnetcdf_pkg.cmake
+++ /dev/null
@@ -1,13 +0,0 @@
-set(PNETCDF_MAJOR_SRC 1)
-set(PNETCDF_MINOR_SRC 6)
-set(PNETCDF_PATCH_SRC 0)
-set(PNETCDF_URL ${LLNL_URL})
-set(PNETCDF_GZ parallel-netcdf-${PNETCDF_MAJOR_SRC}.${PNETCDF_MINOR_SRC}.${PNETCDF_PATCH_SRC}.tar.gz)
-set(PNETCDF_MD5 4893a50ddcd487a312c64383bdeb2631)
-
-set (nm PNETCDF)
-string(TOUPPER ${nm} uc_nm)
-set(${uc_nm}_VERSION ${${nm}_MAJOR_SRC}.${${nm}_MINOR_SRC}.${${nm}_PATCH_SRC})
-set(PNETCDF_SOURCE ${PNETCDF_URL}/${PNETCDF_GZ})
-
-add_cdat_package(PNETCDF "" "" OFF)
diff --git a/CMake/cdat_modules/png_deps.cmake b/CMake/cdat_modules/png_deps.cmake
deleted file mode 100644
index 43bad1a488f2939cf0683330f057792ff68ba1cc..0000000000000000000000000000000000000000
--- a/CMake/cdat_modules/png_deps.cmake
+++ /dev/null
@@ -1 +0,0 @@
-set(png_deps ${pkgconfig_pkg} ${zlib_pkg})
diff --git a/CMake/cdat_modules/png_external.cmake b/CMake/cdat_modules/png_external.cmake
deleted file mode 100644
index 3ba0b81a664ad8aea62acc7efaf7ad18cffa116c..0000000000000000000000000000000000000000
--- a/CMake/cdat_modules/png_external.cmake
+++ /dev/null
@@ -1,45 +0,0 @@
-# On Windows we use CMake; otherwise ./configure
-if(WIN32)
-
-  set(png_source "${CMAKE_CURRENT_BINARY_DIR}/png")
-  set(png_binary "${CMAKE_CURRENT_BINARY_DIR}/png-build")
-  set(png_install "${cdat_EXTERNALS}")
-
-  ExternalProject_Add(png
-  URL ${PNG_URL}/${PNG_GZ}
-  URL_MD5 ${PNG_MD5}
-  UPDATE_COMMAND ""
-  SOURCE_DIR ${png_source}
-  BINARY_DIR ${png_binary}
-  CMAKE_CACHE_ARGS
-    -DCMAKE_CXX_FLAGS:STRING=${pv_tpl_cxx_flags}
-    -DCMAKE_C_FLAGS:STRING=${pv_tpl_c_flags}
-    -DCMAKE_BUILD_TYPE:STRING=${CMAKE_CFG_INTDIR}
-    ${pv_tpl_compiler_args}
-    -DZLIB_INCLUDE_DIR:STRING=${ZLIB_INCLUDE_DIR}
-    -DZLIB_LIBRARY:STRING=${ZLIB_LIBRARY}
-  CMAKE_ARGS
-    -DCMAKE_INSTALL_PREFIX:PATH=<INSTALL_DIR>
-  DEPENDS ${png_dependencies}
-  ${ep_log_options}
-  )
-
-else()
-
-  set(png_source "${CMAKE_CURRENT_BINARY_DIR}/build/png")
-  set(png_install "${cdat_EXTERNALS}")
-
-  ExternalProject_Add(png
-    DOWNLOAD_DIR ${CDAT_PACKAGE_CACHE_DIR}
-    SOURCE_DIR ${png_source}
-    INSTALL_DIR ${png_install}
-    URL ${PNG_URL}/${PNG_GZ}
-    URL_MD5 ${PNG_MD5}
-    BUILD_IN_SOURCE 1
-    PATCH_COMMAND ${CMAKE_COMMAND} -E copy_if_different ${cdat_external_patch_dir}/src/png/pngconf.h ${png_source}/pngconf.h
-    CONFIGURE_COMMAND ${CMAKE_COMMAND} -DINSTALL_DIR=<INSTALL_DIR> -DWORKING_DIR=<SOURCE_DIR> -P ${cdat_CMAKE_BINARY_DIR}/cdat_configure_step.cmake
-    DEPENDS ${png_deps}
-    ${ep_log_options}
-  )
-
-endif()
diff --git a/CMake/cdat_modules/png_pkg.cmake b/CMake/cdat_modules/png_pkg.cmake
deleted file mode 100644
index 5a9f1e1f4682534a9a65e846fa819e34f2101c88..0000000000000000000000000000000000000000
--- a/CMake/cdat_modules/png_pkg.cmake
+++ /dev/null
@@ -1,14 +0,0 @@
-set(PNG_MAJOR 1)
-set(PNG_MINOR 4)
-set(PNG_PATCH 1)
-set(PNG_MAJOR_SRC 1)
-set(PNG_MINOR_SRC 5)
-set(PNG_PATCH_SRC 1)
-set(PNG_VERSION ${PNG_MAJOR_SRC}.${PNG_MINOR_SRC}.${PNG_PATCH_SRC})
-set(PNG_URL ${LLNL_URL})
-set(PNG_GZ libpng-${PNG_VERSION}.tar.gz)
-set(PNG_MD5 220035f111ea045a51e290906025e8b5)
-set(PNG_SOURCE ${PNG_URL}/${PNG_GZ})
-
-# Turns out grib2 (and therefore cdms2) needs it, so don't turn this off
-add_cdat_package(png "" "" ON)
diff --git a/CMake/cdat_modules/proj4_deps.cmake b/CMake/cdat_modules/proj4_deps.cmake
deleted file mode 100644
index ec110453c58848ec6383915b30d1ccab0ef06997..0000000000000000000000000000000000000000
--- a/CMake/cdat_modules/proj4_deps.cmake
+++ /dev/null
@@ -1,4 +0,0 @@
-set(proj4_deps ${pkgconfig_pkg})
-if (CDAT_BUILD_PARALLEL)
-      list(APPEND proj4_deps ${mpi_pkg})
-endif()
diff --git a/CMake/cdat_modules/proj4_external.cmake b/CMake/cdat_modules/proj4_external.cmake
deleted file mode 100644
index 9bd122f5bb155fb8fa06d2a8aa9fa76dc7dcfee3..0000000000000000000000000000000000000000
--- a/CMake/cdat_modules/proj4_external.cmake
+++ /dev/null
@@ -1,20 +0,0 @@
-set(proj4_source "${CMAKE_CURRENT_BINARY_DIR}/build/proj4")
-set(proj4_install "${cdat_EXTERNALS}/proj4")
-set(proj4_configure_args "")
-
-ExternalProject_Add(proj4
-  DOWNLOAD_DIR ${CDAT_PACKAGE_CACHE_DIR}
-  SOURCE_DIR ${proj4_source}
-  INSTALL_DIR ${proj4_install}
-  BUILD_IN_SOURCE 1
-  URL ${PROJ4_SOURCE}
-  URL_MD5 ${PROJ4_MD5}
-  CONFIGURE_COMMAND ${CMAKE_COMMAND} -DINSTALL_DIR=<INSTALL_DIR> -DWORKING_DIR=<SOURCE_DIR> -DCONFIGURE_ARGS=${proj4_configure_args} -P ${cdat_CMAKE_BINARY_DIR}/${configure_file}
-  BUILD_COMMAND ${CMAKE_COMMAND} -Dmake=$(MAKE) -DWORKING_DIR=<SOURCE_DIR> -P ${cdat_CMAKE_BINARY_DIR}/cdat_make_step.cmake
-  INSTALL_COMMAND ${CMAKE_COMMAND} -DWORKING_DIR=<SOURCE_DIR> -P ${cdat_CMAKE_BINARY_DIR}/cdat_install_step.cmake
-  DEPENDS ${proj4_deps}
-  ${ep_log_options}
-)
-if (DEFINED GIT_CMD_STR_PROJ4)
-  unset(GIT_CMD_STR_PROJ4)
-endif()
diff --git a/CMake/cdat_modules/proj4_pkg.cmake b/CMake/cdat_modules/proj4_pkg.cmake
deleted file mode 100644
index 8bf542f527fd926d398e6ea9a609b8c9abbe62f3..0000000000000000000000000000000000000000
--- a/CMake/cdat_modules/proj4_pkg.cmake
+++ /dev/null
@@ -1,12 +0,0 @@
-set(PROJ4_URL ${LLNL_URL})
-set(PROJ4_MAJOR_SRC 4)
-set(PROJ4_MINOR_SRC 9)
-set(PROJ4_PATCH_SRC 2)
-set(PROJ4_GZ proj.4-${PROJ4_MAJOR_SRC}.${PROJ4_MINOR_SRC}.${PROJ4_PATCH_SRC}.tar.gz)
-set(PROJ4_SOURCE ${PROJ4_URL}/${PROJ4_GZ})
-set(PROJ4_MD5 a6059d05592948d5f205ba432e359bd7)
-if (CDAT_BUILD_ALL)
-  add_cdat_package(proj4 "" "" ON)
-else()
-  add_cdat_package_dependent(proj4 "" "" ON "CDAT_BUILD_PROJ4" OFF)
-endif()
diff --git a/CMake/cdat_modules/pyasn1_deps.cmake b/CMake/cdat_modules/pyasn1_deps.cmake
deleted file mode 100644
index bf438928faf256ba29f89a20be19551ca0dfc86a..0000000000000000000000000000000000000000
--- a/CMake/cdat_modules/pyasn1_deps.cmake
+++ /dev/null
@@ -1 +0,0 @@
-set(PYASN1_deps ${pip_pkg} ${python_pkg} ${setuptools_pkg})
diff --git a/CMake/cdat_modules/pyasn1_external.cmake b/CMake/cdat_modules/pyasn1_external.cmake
deleted file mode 100644
index dd35ee111443805ec5ab9b296574593f41263fb4..0000000000000000000000000000000000000000
--- a/CMake/cdat_modules/pyasn1_external.cmake
+++ /dev/null
@@ -1,12 +0,0 @@
-
-# create an external project to install pyasn1,
-# and configure and build it
-set(nm PYASN1)
-
-# Set LDFlags and CFlags to make it easier to find OpenSSL
-list(APPEND USR_ENVS
-  "LDFLAGS=-L${OPENSSL_LIBRARY_DIR} $ENV{LDFLAGS}"
-  "CFLAGS=-I${OPENSSL_INCLUDE_DIR} $ENV{CFLAGS}"
-  )
-
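-# pipinstaller.cmake presumably picks up ${nm}, the matching <NM>_SOURCE and
-# <NM>_MD5 variables, and the USR_ENVS list above to drive a pip-based
-# install (inferred from the other *_external modules)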
-include(pipinstaller)
diff --git a/CMake/cdat_modules/pyasn1_pkg.cmake b/CMake/cdat_modules/pyasn1_pkg.cmake
deleted file mode 100644
index ff69f7c518435f47dc63c942f8150979a61d1364..0000000000000000000000000000000000000000
--- a/CMake/cdat_modules/pyasn1_pkg.cmake
+++ /dev/null
@@ -1,10 +0,0 @@
-set(PYASN1_MAJOR_SRC 0)
-set(PYASN1_MINOR_SRC 1)
-set(PYASN1_PATCH_SRC 9)
-
-set(PYASN1_VERSION ${PYASN1_MAJOR_SRC}.${PYASN1_MINOR_SRC}.${PYASN1_PATCH_SRC})
-set(PYASN1_GZ pyasn1-${PYASN1_VERSION}.tar.gz)
-set(PYASN1_SOURCE ${LLNL_URL}/${PYASN1_GZ})
-set(PYASN1_MD5 f00a02a631d4016818659d1cc38d229a)
-
-add_cdat_package_dependent(PYASN1 "" "" OFF "CDAT_BUILD_LEAN" OFF)
diff --git a/CMake/cdat_modules/pyclimate_deps.cmake b/CMake/cdat_modules/pyclimate_deps.cmake
deleted file mode 100644
index ee5768752d177197f1be7223b2b08678d5624f6c..0000000000000000000000000000000000000000
--- a/CMake/cdat_modules/pyclimate_deps.cmake
+++ /dev/null
@@ -1,2 +0,0 @@
-set(pyclimate_deps ${numpy_pkg} ${pip_pkg})
-
diff --git a/CMake/cdat_modules/pyclimate_external.cmake b/CMake/cdat_modules/pyclimate_external.cmake
deleted file mode 100644
index 4fe52288f9d0fb9c449cfdad17265491de3b8f21..0000000000000000000000000000000000000000
--- a/CMake/cdat_modules/pyclimate_external.cmake
+++ /dev/null
@@ -1,6 +0,0 @@
-# create an external project to install PyClimate
-# and configure and build it
-set(nm pyclimate)
-set(OLD OFF)
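-# OLD appears to toggle pipinstaller's legacy install path; it is explicitly
-# disabled here and cleared again afterwards (inferred from the flag's usage)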
-include(pipinstaller)
-unset(OLD)
diff --git a/CMake/cdat_modules/pyclimate_pkg.cmake b/CMake/cdat_modules/pyclimate_pkg.cmake
deleted file mode 100644
index e151f3cff3e59ff1d32f64ea25f20b34849c403f..0000000000000000000000000000000000000000
--- a/CMake/cdat_modules/pyclimate_pkg.cmake
+++ /dev/null
@@ -1,11 +0,0 @@
-set(PYCLIMATE_VERSION 1.2.3)
-set(PYCLIMATE_URL ${LLNL_URL})
-set(PYCLIMATE_GZ PyClimate-${PYCLIMATE_VERSION}.tar.gz)
-set(PYCLIMATE_SOURCE ${PYCLIMATE_URL}/${PYCLIMATE_GZ})
-set(PYCLIMATE_MD5 094ffd0adedc3ede24736e0c0ff1699f)
-
-if (CDAT_BUILD_ALL)
-  add_cdat_package(pyclimate "" "" ON)
-else()
-  add_cdat_package(pyclimate "" "" OFF)
-endif()
diff --git a/CMake/cdat_modules/pycparser_deps.cmake b/CMake/cdat_modules/pycparser_deps.cmake
deleted file mode 100644
index 3efd2d4eef7fb4859e20014abd894b3a0b604405..0000000000000000000000000000000000000000
--- a/CMake/cdat_modules/pycparser_deps.cmake
+++ /dev/null
@@ -1 +0,0 @@
-set(PYCPARSER_deps ${python_pkg} ${pip_pkg})
diff --git a/CMake/cdat_modules/pycparser_external.cmake b/CMake/cdat_modules/pycparser_external.cmake
deleted file mode 100644
index f9b317f4f1c512936992270e570472bc38a5f26b..0000000000000000000000000000000000000000
--- a/CMake/cdat_modules/pycparser_external.cmake
+++ /dev/null
@@ -1,6 +0,0 @@
-
-# create an external project to install pycparser,
-# and configure and build it
-set(nm PYCPARSER)
-
-include(pipinstaller)
diff --git a/CMake/cdat_modules/pycparser_pkg.cmake b/CMake/cdat_modules/pycparser_pkg.cmake
deleted file mode 100644
index 873a293dde87ea9ea40ef0e55e6104060c2ae06c..0000000000000000000000000000000000000000
--- a/CMake/cdat_modules/pycparser_pkg.cmake
+++ /dev/null
@@ -1,10 +0,0 @@
-set(PYCPARSER_MAJOR_SRC 2)
-set(PYCPARSER_MINOR_SRC 13)
-set(PYCPARSER_PATCH_SRC )
-
-set(PYCPARSER_VERSION ${PYCPARSER_MAJOR_SRC}.${PYCPARSER_MINOR_SRC})
-set(PYCPARSER_GZ pycparser-${PYCPARSER_VERSION}.tar.gz)
-set(PYCPARSER_SOURCE ${LLNL_URL}/${PYCPARSER_GZ})
-set(PYCPARSER_MD5 e4fe1a2d341b22e25da0d22f034ef32f )
-
-add_cdat_package_dependent(PYCPARSER "" "" ON "CDAT_BUILD_LEAN" OFF)
diff --git a/CMake/cdat_modules/pyflakes_deps.cmake b/CMake/cdat_modules/pyflakes_deps.cmake
deleted file mode 100644
index 14a40726d71dc68acf5fb65eea105092fa0f3c1c..0000000000000000000000000000000000000000
--- a/CMake/cdat_modules/pyflakes_deps.cmake
+++ /dev/null
@@ -1 +0,0 @@
-set(pyflakes_deps ${python_pkg} ${setuptools_pkg})
diff --git a/CMake/cdat_modules/pyflakes_external.cmake b/CMake/cdat_modules/pyflakes_external.cmake
deleted file mode 100644
index 40a4774f739c73ca2f77150fb799a2a843d9c7d4..0000000000000000000000000000000000000000
--- a/CMake/cdat_modules/pyflakes_external.cmake
+++ /dev/null
@@ -1,12 +0,0 @@
-ExternalProject_Add(pyflakes
-  DOWNLOAD_DIR "${CMAKE_CURRENT_BINARY_DIR}"
-  SOURCE_DIR "${CMAKE_CURRENT_BINARY_DIR}/build/pyflakes"
-  URL "${PYFLAKES_SOURCE}"
-  URL_MD5 ${PYFLAKES_MD5}
-  BUILD_IN_SOURCE 1
-  CONFIGURE_COMMAND ""
-  BUILD_COMMAND "${PYTHON_EXECUTABLE}" setup.py build
-  INSTALL_COMMAND "${PYTHON_EXECUTABLE}" setup.py install "${PYTHON_EXTRA_PREFIX}"
-  DEPENDS ${pyflakes_deps}
-  ${ep_log_options}
-  )
diff --git a/CMake/cdat_modules/pyflakes_pkg.cmake b/CMake/cdat_modules/pyflakes_pkg.cmake
deleted file mode 100644
index a83f881c7688060ec41f6ff180ffcf38cd7b711c..0000000000000000000000000000000000000000
--- a/CMake/cdat_modules/pyflakes_pkg.cmake
+++ /dev/null
@@ -1,17 +0,0 @@
-set(nm pyflakes)
-string(TOUPPER ${nm} uc_nm)
-
-set(${uc_nm}_MAJOR 0)
-set(${uc_nm}_MINOR 8)
-set(${uc_nm}_PATCH 1)
-set(${uc_nm}_VERSION ${${uc_nm}_MAJOR}.${${uc_nm}_MINOR}.${${uc_nm}_PATCH})
-set(${uc_nm}_URL ${LLNL_URL})
-set(${uc_nm}_GZ ${nm}-${${uc_nm}_VERSION}.tar.gz)
-set(${uc_nm}_MD5 905fe91ad14b912807e8fdc2ac2e2c23 )
-
-set(${uc_nm}_VERSION ${${uc_nm}_MAJOR}.${${uc_nm}_MINOR}.${${uc_nm}_PATCH})
-set(${uc_nm}_SOURCE ${${uc_nm}_URL}/${${uc_nm}_GZ})
-
-if(BUILD_TESTING)
-  add_cdat_package(${nm} "" "" ON)
-endif()
diff --git a/CMake/cdat_modules/pygments_deps.cmake b/CMake/cdat_modules/pygments_deps.cmake
deleted file mode 100644
index 8da947cd9c4199a71e3115b75a83613d4c913fc4..0000000000000000000000000000000000000000
--- a/CMake/cdat_modules/pygments_deps.cmake
+++ /dev/null
@@ -1 +0,0 @@
-set(pygments_deps ${pip_pkg} ${lepl_pkg})
diff --git a/CMake/cdat_modules/pygments_external.cmake b/CMake/cdat_modules/pygments_external.cmake
deleted file mode 100644
index 225a8b16008b4645319277c3520f8b9a849d88a1..0000000000000000000000000000000000000000
--- a/CMake/cdat_modules/pygments_external.cmake
+++ /dev/null
@@ -1,6 +0,0 @@
-
-# create an external project to install pygments,
-# and configure and build it
-set(nm pygments)
-
-include(pipinstaller)
diff --git a/CMake/cdat_modules/pygments_pkg.cmake b/CMake/cdat_modules/pygments_pkg.cmake
deleted file mode 100644
index 7bd13b2b5da240476f9ac82acb309fb93e7413a7..0000000000000000000000000000000000000000
--- a/CMake/cdat_modules/pygments_pkg.cmake
+++ /dev/null
@@ -1,9 +0,0 @@
-set(pygments_MAJOR_SRC 1)
-set(pygments_MINOR_SRC 6)
-set(pygments_PATCH_SRC )
-set(PYGMENTS_VERSION ${pygments_MAJOR_SRC}.${pygments_MINOR_SRC})
-set(PYGMENTS_GZ Pygments-${PYGMENTS_VERSION}.tar.gz)
-set(PYGMENTS_SOURCE ${LLNL_URL}/${PYGMENTS_GZ})
-set(PYGMENTS_MD5 a18feedf6ffd0b0cc8c8b0fbdb2027b1  )
-
-add_cdat_package_dependent(pygments "" "" OFF "CDAT_BUILD_GUI" OFF)
diff --git a/CMake/cdat_modules/pylibxml2_deps.cmake b/CMake/cdat_modules/pylibxml2_deps.cmake
deleted file mode 100644
index 8ff91e2465ec254927c6cd0c3da26c9820067e91..0000000000000000000000000000000000000000
--- a/CMake/cdat_modules/pylibxml2_deps.cmake
+++ /dev/null
@@ -1 +0,0 @@
-set(PYLIBXML2_deps ${pkgconfig_pkg} ${python_pkg} ${libxml2_pkg} ${libxslt_pkg})
diff --git a/CMake/cdat_modules/pylibxml2_external.cmake b/CMake/cdat_modules/pylibxml2_external.cmake
deleted file mode 100644
index f3d77fd17d664dd2d08538f0aa8aca93ab38b30d..0000000000000000000000000000000000000000
--- a/CMake/cdat_modules/pylibxml2_external.cmake
+++ /dev/null
@@ -1,17 +0,0 @@
-
-set(libXML2_source "${CMAKE_CURRENT_BINARY_DIR}/build/libXML2")
-set(libXML2_install "${cdat_EXTERNALS}")
-
-ExternalProject_Add(PYLIBXML2
-  DOWNLOAD_DIR ${CDAT_PACKAGE_CACHE_DIR}
-  SOURCE_DIR ${libXML2_source}
-  INSTALL_DIR ${libXML2_install}
-  URL ${XML_URL}/${XML_GZ}
-  URL_MD5 ${XML_MD5}
-  BUILD_IN_SOURCE 1
-  PATCH_COMMAND ""
-  CONFIGURE_COMMAND ${CMAKE_COMMAND} -DINSTALL_DIR=<INSTALL_DIR> -DWORKING_DIR=<SOURCE_DIR> -P ${cdat_CMAKE_BINARY_DIR}/cdat_configure_step.cmake
-  DEPENDS ${PYLIBXML2_deps}
-  ${ep_log_options}
-)
-
diff --git a/CMake/cdat_modules/pylibxml2_pkg.cmake b/CMake/cdat_modules/pylibxml2_pkg.cmake
deleted file mode 100644
index e374227f78adbccd9d7544402b7e86652e23b6f7..0000000000000000000000000000000000000000
--- a/CMake/cdat_modules/pylibxml2_pkg.cmake
+++ /dev/null
@@ -1,15 +0,0 @@
-set(PYLIBXML2_MAJOR 2)
-set(PYLIBXML2_MINOR 7)
-set(PYLIBXML2_PATCH 8)
-set(PYLIBXML2_MAJOR_SRC 2)
-set(PYLIBXML2_MINOR_SRC 7)
-set(PYLIBXML2_PATCH_SRC 8)
-set(PYLIBXML2_URL ${LLNL_URL})
-set(PYLIBXML2_GZ libxml2-${PYLIBXML2_MAJOR_SRC}.${PYLIBXML2_MINOR_SRC}.${PYLIBXML2_PATCH_SRC}.tar.gz)
-set(PYLIBXML2_MD5 8127a65e8c3b08856093099b52599c86)
-set(PYLIBXML2_SOURCE ${PYLIBXML2_URL}/${PYLIBXML2_GZ})
-
-set (nm PYLIBXML2)
-string(TOUPPER ${nm} uc_nm)
-set(${uc_nm}_VERSION ${${nm}_MAJOR_SRC}.${${nm}_MINOR_SRC}.${${nm}_PATCH_SRC})
-add_cdat_package(PYLIBXML2 "" "" OFF)
diff --git a/CMake/cdat_modules/pynetcdf4_deps.cmake b/CMake/cdat_modules/pynetcdf4_deps.cmake
deleted file mode 100644
index 176f1cd996a61e78bed7156d7ba77c873baa1e02..0000000000000000000000000000000000000000
--- a/CMake/cdat_modules/pynetcdf4_deps.cmake
+++ /dev/null
@@ -1 +0,0 @@
-set(pynetcdf4_deps ${pkgconfig_pkg} ${pip_pkg} ${python_pkg} ${netcdf_pkg} ${numpy_pkg})
diff --git a/CMake/cdat_modules/pynetcdf4_external.cmake b/CMake/cdat_modules/pynetcdf4_external.cmake
deleted file mode 100644
index f430b9e16b7a94436aad22215d768834903ecbbf..0000000000000000000000000000000000000000
--- a/CMake/cdat_modules/pynetcdf4_external.cmake
+++ /dev/null
@@ -1,19 +0,0 @@
-# create an external project to install pynetcdf
-# and configure and build it
-
-# pynetcdf4
-#
-set(pynetcdf4_source "${CMAKE_CURRENT_BINARY_DIR}/build/pynetcdf4")
-
-ExternalProject_Add(pynetcdf4
-  DOWNLOAD_DIR ${CDAT_PACKAGE_CACHE_DIR}
-  SOURCE_DIR ${pynetcdf4_source}
-  URL ${PYNETCDF4_URL}/${PYNETCDF4_GZ}
-  URL_MD5 ${PYNETCDF4_MD5}
-  BUILD_IN_SOURCE 1
-  CONFIGURE_COMMAND ""
-  BUILD_COMMAND env "NETCDF4_DIR=${cdat_EXTERNALS}" "PYTHONPATH=$ENV{PYTHONPATH}" ${PYTHON_EXECUTABLE} setup.py build
-  INSTALL_COMMAND env "NETCDF4_DIR=${cdat_EXTERNALS}" "PYTHONPATH=$ENV{PYTHONPATH}" "${PYTHON_EXECUTABLE}" setup.py install "${PYTHON_EXTRA_PREFIX}"
-  DEPENDS ${pynetcdf4_deps}
-  ${ep_log_options}
-)
diff --git a/CMake/cdat_modules/pynetcdf4_pkg.cmake b/CMake/cdat_modules/pynetcdf4_pkg.cmake
deleted file mode 100644
index ebefb167b2129adf4255b3aa886d9306a5a0fce4..0000000000000000000000000000000000000000
--- a/CMake/cdat_modules/pynetcdf4_pkg.cmake
+++ /dev/null
@@ -1,13 +0,0 @@
-set( PYNETCDF4_MAJOR_SRC 1  )
-set( PYNETCDF4_MINOR_SRC 1 )
-set( PYNETCDF4_PATCH_SRC 9  )
-set(PYNETCDF4_URL ${LLNL_URL})
-set(PYNETCDF4_GZ
-  netCDF4-${PYNETCDF4_MAJOR_SRC}.${PYNETCDF4_MINOR_SRC}.${PYNETCDF4_PATCH_SRC}.tar.gz)
-set(PYNETCDF4_MD5 4ee7399e547f8b906e89da5529fa5ef4)
-set(PYNETCDF4_SOURCE ${PYNETCDF4_URL}/${PYNETCDF4_GZ})
-
-set (nm pynetcdf4)
-string(TOUPPER ${nm} uc_nm)
-set(${uc_nm}_VERSION ${${nm}_MAJOR_SRC}.${${nm}_MINOR_SRC}.${${nm}_PATCH_SRC})
-add_cdat_package(pynetcdf4 "" "" ON)
diff --git a/CMake/cdat_modules/pyopengl_external.cmake b/CMake/cdat_modules/pyopengl_external.cmake
deleted file mode 100644
index daf68bf40c7147387a7fd3e9986201e3fc67234f..0000000000000000000000000000000000000000
--- a/CMake/cdat_modules/pyopengl_external.cmake
+++ /dev/null
@@ -1,29 +0,0 @@
-
-set(PyOpenGL_source "${CMAKE_CURRENT_BINARY_DIR}/PyOpenGL")
-set(PyOpenGL_install "${cdat_EXTERNALS}")
-
-configure_file(${cdat_CMAKE_SOURCE_DIR}/cdat_modules_extra/pyopengl_make_step.cmake.in
-  ${cdat_CMAKE_BINARY_DIR}/pyopengl_make_step.cmake
-  @ONLY)
-
-configure_file(${cdat_CMAKE_SOURCE_DIR}/cdat_modules_extra/pyopengl_install_step.cmake.in
-  ${cdat_CMAKE_BINARY_DIR}/pyopengl_install_step.cmake
-  @ONLY)
-
-set(PyOpenGL_build_command ${CMAKE_COMMAND} -P ${cdat_CMAKE_BINARY_DIR}/pyopengl_make_step.cmake)
-set(PyOpenGL_install_command ${CMAKE_COMMAND} -P ${cdat_CMAKE_BINARY_DIR}/pyopengl_install_step.cmake)
-
-ExternalProject_Add(PyOpenGL
-  DOWNLOAD_DIR ${CDAT_PACKAGE_CACHE_DIR}
-  SOURCE_DIR ${PyOpenGL_source}
-  URL ${PYOPENGL_URL}/${PYOPENGL_GZ}
-  URL_MD5 ${PYOPENGL_MD5}
-  BUILD_IN_SOURCE 1
-  PATCH_COMMAND ""
-  CONFIGURE_COMMAND ""
-  BUILD_COMMAND ${PyOpenGL_build_command}
-  INSTALL_COMMAND ${PyOpenGL_install_command}
-  DEPENDS ${PyOpenGL_deps}
-  ${ep_log_options}
-)
-
diff --git a/CMake/cdat_modules/pyopenssl_deps.cmake b/CMake/cdat_modules/pyopenssl_deps.cmake
deleted file mode 100644
index 6ab54642e1b3e031605d95a8e9b87cc1f530fafc..0000000000000000000000000000000000000000
--- a/CMake/cdat_modules/pyopenssl_deps.cmake
+++ /dev/null
@@ -1 +0,0 @@
-set(PYOPENSSL_deps ${python_pkg} ${pip_pkg} ${six_pkg} ${cryptography_pkg} ${cffi_pkg} ${ffi_pkg} ${pycparser_pkg} )
diff --git a/CMake/cdat_modules/pyopenssl_external.cmake b/CMake/cdat_modules/pyopenssl_external.cmake
deleted file mode 100644
index c0ed4c23863a346a5a91cf4ddad4a10682af1b66..0000000000000000000000000000000000000000
--- a/CMake/cdat_modules/pyopenssl_external.cmake
+++ /dev/null
@@ -1,6 +0,0 @@
-
-# create an external project to install pyOpenSSL,
-# and configure and build it
-set(nm PYOPENSSL)
-
-include(pipinstaller)
diff --git a/CMake/cdat_modules/pyopenssl_pkg.cmake b/CMake/cdat_modules/pyopenssl_pkg.cmake
deleted file mode 100644
index d9e4d4bc4db0321cd6816fc0cff4efca108eab8f..0000000000000000000000000000000000000000
--- a/CMake/cdat_modules/pyopenssl_pkg.cmake
+++ /dev/null
@@ -1,10 +0,0 @@
-set(PYOPENSSL_MAJOR_SRC 0)
-set(PYOPENSSL_MINOR_SRC 14)
-set(PYOPENSSL_PATCH_SRC 0)
-
-set(PYOPENSSL_VERSION ${PYOPENSSL_MAJOR_SRC}.${PYOPENSSL_MINOR_SRC})
-set(PYOPENSSL_GZ pyOpenSSL-${PYOPENSSL_VERSION}.tar.gz)
-set(PYOPENSSL_SOURCE ${LLNL_URL}/${PYOPENSSL_GZ})
-set(PYOPENSSL_MD5 8579ff3a1d858858acfba5f046a4ddf7)
-
-add_cdat_package_dependent(PYOPENSSL "" "" OFF "CDAT_BUILD_LEAN" OFF)
diff --git a/CMake/cdat_modules/pyparsing_deps.cmake b/CMake/cdat_modules/pyparsing_deps.cmake
deleted file mode 100644
index 79eea79134aa269c1719cd60aa63dccf6edf837f..0000000000000000000000000000000000000000
--- a/CMake/cdat_modules/pyparsing_deps.cmake
+++ /dev/null
@@ -1 +0,0 @@
-set(PYPARSING_deps ${python_pkg} ${pip_pkg} ${dateutils_pkg} ${six_pkg})
diff --git a/CMake/cdat_modules/pyparsing_external.cmake b/CMake/cdat_modules/pyparsing_external.cmake
deleted file mode 100644
index b72862852077166f2e3bdb7e154e8968f103f65c..0000000000000000000000000000000000000000
--- a/CMake/cdat_modules/pyparsing_external.cmake
+++ /dev/null
@@ -1,7 +0,0 @@
-
-# create an external project to install pyparsing,
-# and configure and build it
-set(nm PYPARSING)
-set(OLD OFF)
-include(pipinstaller)
-unset(OLD)
diff --git a/CMake/cdat_modules/pyparsing_pkg.cmake b/CMake/cdat_modules/pyparsing_pkg.cmake
deleted file mode 100644
index 8c6e265e020af1e63d892cae4900f3f5fc392d21..0000000000000000000000000000000000000000
--- a/CMake/cdat_modules/pyparsing_pkg.cmake
+++ /dev/null
@@ -1,10 +0,0 @@
-set(PYPARSING_MAJOR_SRC 2)
-set(PYPARSING_MINOR_SRC 0)
-set(PYPARSING_PATCH_SRC 2)
-
-set(PYPARSING_VERSION ${PYPARSING_MAJOR_SRC}.${PYPARSING_MINOR_SRC}.${PYPARSING_PATCH_SRC})
-set(PYPARSING_GZ pyparsing-${PYPARSING_VERSION}.tar.gz)
-set(PYPARSING_SOURCE ${LLNL_URL}/${PYPARSING_GZ})
-set(PYPARSING_MD5 b170c5d153d190df1a536988d88e95c1)
-
-add_cdat_package_dependent(PYPARSING "" "" OFF "NOT CDAT_BUILD_LEAN" OFF)
diff --git a/CMake/cdat_modules/pyqt_deps.cmake b/CMake/cdat_modules/pyqt_deps.cmake
deleted file mode 100644
index 023e6753a86e007d1c575f8fc2d1da3bdd87384c..0000000000000000000000000000000000000000
--- a/CMake/cdat_modules/pyqt_deps.cmake
+++ /dev/null
@@ -1 +0,0 @@
-set(PyQt_deps ${pkgconfig_pkg} ${qt_pkg} ${sip_pkg} ${python_pkg})
diff --git a/CMake/cdat_modules/pyqt_external.cmake b/CMake/cdat_modules/pyqt_external.cmake
deleted file mode 100644
index 5a00060e986a520b08a9d7dc6d382fd8da0b98e7..0000000000000000000000000000000000000000
--- a/CMake/cdat_modules/pyqt_external.cmake
+++ /dev/null
@@ -1,28 +0,0 @@
-set(PyQt_source "${CMAKE_CURRENT_BINARY_DIR}/build/PyQt")
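-# point configure.py at this build's qmake and install prefixes; each -e flag
-# explicitly enables one of the listed Qt modules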
-set(PyQt_configure_command
-  env PYTHONPATH=${PYTHONPATH}
-  "${PYTHON_EXECUTABLE}" configure.py
-    -q "${QT_QMAKE_EXECUTABLE}"
-    --confirm-license
-    -b "${CMAKE_INSTALL_PREFIX}/bin"
-    -d "${PYTHON_SITE_PACKAGES}"
-    -v "${CMAKE_INSTALL_PREFIX}/include"
-    -v "${CMAKE_INSTALL_PREFIX}/share"
-    -p "${CMAKE_INSTALL_PREFIX}/share/plugins"
-    -n "${CMAKE_INSTALL_PREFIX}/share/qsci"
-    --assume-shared
-    -e QtGui -e QtHelp -e QtMultimedia -e QtNetwork -e QtDeclarative -e QtOpenGL
-    -e QtScript -e QtScriptTools -e QtSql -e QtSvg -e QtTest -e QtWebKit
-    -e QtXml -e QtXmlPatterns -e QtCore
-)
-
-ExternalProject_Add(PyQt
-  DOWNLOAD_DIR ${CDAT_PACKAGE_CACHE_DIR}
-  SOURCE_DIR ${PyQt_source}
-  URL ${PYQT_URL}/${PYQT_GZ_${CMAKE_PLATFORM}}
-  URL_MD5 ${PYQT_MD5_${CMAKE_PLATFORM}}
-  BUILD_IN_SOURCE 1
-  CONFIGURE_COMMAND ${PyQt_configure_command}
-  DEPENDS ${PyQt_deps}
-  ${ep_log_options}
-  )
diff --git a/CMake/cdat_modules/pyqt_pkg.cmake b/CMake/cdat_modules/pyqt_pkg.cmake
deleted file mode 100644
index a049bc7281dcc35be0c06ceab99b7f84a61883ac..0000000000000000000000000000000000000000
--- a/CMake/cdat_modules/pyqt_pkg.cmake
+++ /dev/null
@@ -1,16 +0,0 @@
-set(PYQT_MAJOR 4)
-set(PYQT_MINOR 8)
-set(PYQT_PATCH 3)
-set(PYQT_MAJOR_SRC 4)
-set(PYQT_MINOR_SRC 11)
-set(PYQT_PATCH_SRC 3)
-set(PYQT_VERSION ${PYQT_MAJOR_SRC}.${PYQT_MINOR_SRC}.${PYQT_PATCH_SRC})
-set(PYQT_URL ${LLNL_URL})
-set(PYQT_GZ_APPLE PyQt-mac-gpl-${PYQT_VERSION}.tar.gz)
-set(PYQT_GZ_UNIX PyQt-x11-gpl-${PYQT_VERSION}.tar.gz)
-set(PYQT_MD5_APPLE 9bd050f1d0c91510ea8be9f41878144c )
-set(PYQT_MD5_UNIX 997c3e443165a89a559e0d96b061bf70 )
-set(PYQT_SOURCE ${PYQT_URL}/${PYQT_GZ_${CMAKE_PLATFORM}})
-set(PYQT_MD5 ${PYQT_MD5_${CMAKE_PLATFORM}})
-
-add_cdat_package_dependent(PyQt "" "" ON "CDAT_BUILD_GUI" OFF)
diff --git a/CMake/cdat_modules/pyspharm_deps.cmake b/CMake/cdat_modules/pyspharm_deps.cmake
deleted file mode 100644
index 181e0c27166c785c9484eea68fbb8976103806df..0000000000000000000000000000000000000000
--- a/CMake/cdat_modules/pyspharm_deps.cmake
+++ /dev/null
@@ -1 +0,0 @@
-set(pyspharm_deps ${numpy_pkg})
diff --git a/CMake/cdat_modules/pyspharm_external.cmake b/CMake/cdat_modules/pyspharm_external.cmake
deleted file mode 100644
index 2c1de4f91c0acfa1e7141f17a2e228489690350e..0000000000000000000000000000000000000000
--- a/CMake/cdat_modules/pyspharm_external.cmake
+++ /dev/null
@@ -1,25 +0,0 @@
-
-# Pyspharm
-#
-set(pyspharm_source "${CMAKE_CURRENT_BINARY_DIR}/build/pyspharm")
-
-
-configure_file(${cdat_CMAKE_SOURCE_DIR}/cdat_modules_extra/pyspharm_patch_step.cmake.in
-  ${cdat_CMAKE_BINARY_DIR}/pyspharm_patch_step.cmake
-  @ONLY)
-  
-set(pyspharm_PATCH_COMMAND ${CMAKE_COMMAND} -P ${cdat_CMAKE_BINARY_DIR}/pyspharm_patch_step.cmake)
-
-ExternalProject_Add(pyspharm
-  DOWNLOAD_DIR ${CDAT_PACKAGE_CACHE_DIR}
-  SOURCE_DIR ${pyspharm_source}
-  URL ${PYSPHARM_URL}/${PYSPHARM_GZ}
-  URL_MD5 ${PYSPHARM_MD5}
-  BUILD_IN_SOURCE 1
-  PATCH_COMMAND ${pyspharm_PATCH_COMMAND}
-  CONFIGURE_COMMAND ""
-  BUILD_COMMAND env LD_LIBRARY_PATH=$ENV{LD_LIBRARY_PATH} PYTHONPATH=$ENV{PYTHONPATH} ${PYTHON_EXECUTABLE} setup.py build 
-  INSTALL_COMMAND env LD_LIBRARY_PATH=$ENV{LD_LIBRARY_PATH} PYTHONPATH=$ENV{PYTHONPATH} ${PYTHON_EXECUTABLE} setup.py install --prefix=${PYTHON_SITE_PACKAGES_PREFIX}
-  DEPENDS ${pyspharm_deps}
-  ${ep_log_options}
-  )
diff --git a/CMake/cdat_modules/pyspharm_pkg.cmake b/CMake/cdat_modules/pyspharm_pkg.cmake
deleted file mode 100644
index c7e8eb166d17d91c64c095a96673a2a64195afd2..0000000000000000000000000000000000000000
--- a/CMake/cdat_modules/pyspharm_pkg.cmake
+++ /dev/null
@@ -1,11 +0,0 @@
-set(PYSPHARM_MAJOR 1)
-set(PYSPHARM_MINOR 0)
-set(PYSPHARM_PATCH 8)
-set(PYSPHARM_VERSION ${PYSPHARM_MAJOR}.${PYSPHARM_MINOR}.${PYSPHARM_PATCH})
-set(PYSPHARM_URL ${LLNL_URL})
-set(PYSPHARM_GZ pyspharm-${PYSPHARM_VERSION}.tar.gz)
-set(PYSPHARM_MD5 7b3a33dd3cbeaa4b8bf67ed5bd210931)
-set(PYSPHARM_SOURCE ${PYSPHARM_URL}/${PYSPHARM_GZ})
-
-add_cdat_package_dependent(pyspharm "" "" ${CDAT_BUILD_ALL}
-                           "NOT CDAT_BUILD_LEAN" OFF)
diff --git a/CMake/cdat_modules/pytables_deps.cmake b/CMake/cdat_modules/pytables_deps.cmake
deleted file mode 100644
index d44617773380c07372d0da20223d1e8586c51a49..0000000000000000000000000000000000000000
--- a/CMake/cdat_modules/pytables_deps.cmake
+++ /dev/null
@@ -1 +0,0 @@
-set(PyTables_deps ${pkgconfig_pkg} ${python_pkg} ${cython_pkg} ${numexpr_pkg} ${numpy_pkg} ${hdf5_pkg} ${libxml2_pkg} ${libxslt_pkg} ${zlib_pkg})
diff --git a/CMake/cdat_modules/pytables_external.cmake b/CMake/cdat_modules/pytables_external.cmake
deleted file mode 100644
index 42e7c6074595aadb4b568bc5ac4f44761f234ad3..0000000000000000000000000000000000000000
--- a/CMake/cdat_modules/pytables_external.cmake
+++ /dev/null
@@ -1,17 +0,0 @@
-include(${cdat_CMAKE_BINARY_DIR}/cdat_common_environment.cmake)
-
-# PyTables
-#
-set(PyTables_source "${CMAKE_CURRENT_BINARY_DIR}/build/PyTables")
-ExternalProject_Add(PyTables
-  DOWNLOAD_DIR ${CDAT_PACKAGE_CACHE_DIR}
-  SOURCE_DIR ${PyTables_source}
-  URL ${PYTABLES_URL}/${PYTABLES_GZ}
-  URL_MD5 ${PYTABLES_MD5}
-  BUILD_IN_SOURCE 1
-  CONFIGURE_COMMAND ""
-  BUILD_COMMAND env "LD_LIBRARY_PATH=$ENV{LD_LIBRARY_PATH}" PYTHONPATH=$ENV{PYTHONPATH} ${PYTHON_EXECUTABLE} setup.py build --hdf5=${cdat_EXTERNALS}
-  INSTALL_COMMAND env "LD_LIBRARY_PATH=$ENV{LD_LIBRARY_PATH}" PYTHONPATH=$ENV{PYTHONPATH} ${PYTHON_EXECUTABLE} setup.py install --hdf5=${cdat_EXTERNALS} ${PYTHON_EXTRA_PREFIX}
-  DEPENDS ${PyTables_deps}
-  ${ep_log_options}
-  )
diff --git a/CMake/cdat_modules/pytables_pkg.cmake b/CMake/cdat_modules/pytables_pkg.cmake
deleted file mode 100644
index 22faad22ad5bd01940e54be40f6f6c543f0e5ca7..0000000000000000000000000000000000000000
--- a/CMake/cdat_modules/pytables_pkg.cmake
+++ /dev/null
@@ -1,10 +0,0 @@
-set(PYTABLES_MAJOR 3)
-set(PYTABLES_MINOR 1)
-set(PYTABLES_PATCH 1)
-set(PYTABLES_VERSION ${PYTABLES_MAJOR}.${PYTABLES_MINOR}.${PYTABLES_PATCH})
-set(PYTABLES_URL ${LLNL_URL} )
-set(PYTABLES_GZ tables-${PYTABLES_VERSION}.tar.gz)
-set(PYTABLES_MD5 38d917f0c6dfb0bc28ce9ea0c3492524)
-set(PYTABLES_SOURCE ${PYTABLES_URL}/${PYTABLES_GZ})
-
-add_cdat_package_dependent(PyTables "" "" OFF "NOT CDAT_BUILD_LEAN" ${CDAT_BUILD_ALL})
diff --git a/CMake/cdat_modules/python_deps.cmake b/CMake/cdat_modules/python_deps.cmake
deleted file mode 100644
index 04864b10c4b73c02689f361278d3837813604432..0000000000000000000000000000000000000000
--- a/CMake/cdat_modules/python_deps.cmake
+++ /dev/null
@@ -1 +0,0 @@
-set(Python_deps ${pkgconfig_pkg} ${readline_pkg} ${libxml2_pkg} ${libxslt_pkg} ${jpeg_pkg} ${png_pkg} ${tiff_pkg})
diff --git a/CMake/cdat_modules/python_external.cmake b/CMake/cdat_modules/python_external.cmake
deleted file mode 100644
index 0710a066308dd85c12126f88a07635728e16465f..0000000000000000000000000000000000000000
--- a/CMake/cdat_modules/python_external.cmake
+++ /dev/null
@@ -1,66 +0,0 @@
-#-----------------------------------------------------------------------------
-set(proj Python)
-
-set(python_SOURCE_DIR ${cdat_BINARY_DIR}/build/Python)
-set(python_BUILD_IN_SOURCE 1)
-
-set(python_aqua_cdat no)
-
-configure_file(${cdat_CMAKE_SOURCE_DIR}/cdat_modules_extra/python_configure_step.cmake.in
-  ${cdat_CMAKE_BINARY_DIR}/python_configure_step.cmake
-  @ONLY)
-
-configure_file(${cdat_CMAKE_SOURCE_DIR}/cdat_modules_extra/python_make_step.cmake.in
-  ${cdat_CMAKE_BINARY_DIR}/python_make_step.cmake
-  @ONLY)
-
-configure_file(${cdat_CMAKE_SOURCE_DIR}/cdat_modules_extra/python_install_step.cmake.in
-  ${cdat_CMAKE_BINARY_DIR}/python_install_step.cmake
-  @ONLY)
-
-set(python_PATCH_COMMAND ${CMAKE_COMMAND} -P ${cdat_CMAKE_BINARY_DIR}/python_patch_step.cmake)
-
-# the configure/build/install steps are identical on every platform; each is
-# driven by one of the CMake scripts generated above
-set(python_CONFIGURE_COMMAND ${CMAKE_COMMAND} -P ${cdat_CMAKE_BINARY_DIR}/python_configure_step.cmake)
-set(python_BUILD_COMMAND ${CMAKE_COMMAND} -P ${cdat_CMAKE_BINARY_DIR}/python_make_step.cmake)
-set(python_INSTALL_COMMAND ${CMAKE_COMMAND} -P ${cdat_CMAKE_BINARY_DIR}/python_install_step.cmake)
-
-ExternalProject_Add(${proj}
-  URL ${PYTHON_URL}/${PYTHON_GZ}
-  URL_MD5 ${PYTHON_MD5}
-  DOWNLOAD_DIR ${CDAT_PACKAGE_CACHE_DIR}
-  SOURCE_DIR ${python_SOURCE_DIR}
-  BUILD_IN_SOURCE ${python_BUILD_IN_SOURCE}
-  UPDATE_COMMAND pwd
-  CONFIGURE_COMMAND ${python_CONFIGURE_COMMAND}
-  BUILD_COMMAND ${python_BUILD_COMMAND}
-  INSTALL_COMMAND ${python_INSTALL_COMMAND}
-  DEPENDS ${Python_deps}
-  ${ep_log_options}
-)
-
-#-----------------------------------------------------------------------------
-# Set PYTHON_INCLUDE and PYTHON_LIBRARY variables
-#
-set(PYTHON_SITE_PACKAGES ${CMAKE_INSTALL_PREFIX}/lib/python${PYVER}/site-packages)
-
-if(APPLE)
-  ExternalProject_Add_Step(${proj} change_plist_name
-    COMMAND ${CMAKE_INSTALL_PREFIX}/Library/Frameworks/Python.framework/Versions/${PYVER}/bin/python ${cdat_CMAKE_SOURCE_DIR}/fixName.py
-    DEPENDEES install
-  )
-  set(PYTHON_INCLUDE ${CMAKE_INSTALL_PREFIX}/Library/Frameworks/Python.framework/Versions/${PYVER}/Headers)
-  set(PYTHON_LIBRARY ${CMAKE_INSTALL_PREFIX}/Library/Frameworks/Python.framework/Versions/${PYVER}/Python)
-  set(PYTHON_LIBRARY_DIR ${CMAKE_INSTALL_PREFIX}/lib)
-  set(PYTHON_EXECUTABLE ${CMAKE_INSTALL_PREFIX}/bin/python)
-  #set(PYTHON_EXECUTABLE ${CMAKE_INSTALL_PREFIX}/Library/Frameworks/Python.framework/Versions/${PYTHON_MAJOR_SRC}.${PYTHON_MINOR_SRC}/bin/python)
-  set(PYTHON_SITE_PACKAGES ${CMAKE_INSTALL_PREFIX}/Library/Frameworks/Python.framework/Versions/${PYVER}/lib/python${PYVER}/site-packages)
-else()
-  set(PYTHON_INCLUDE ${CMAKE_INSTALL_PREFIX}/include/python${PYVER})
-  set(PYTHON_LIBRARY ${CMAKE_INSTALL_PREFIX}/lib/libpython${PYVER}.so)
-endif()
diff --git a/CMake/cdat_modules/python_pkg.cmake b/CMake/cdat_modules/python_pkg.cmake
deleted file mode 100644
index 36c97d702d224ce8711be50d520698267fa61c4a..0000000000000000000000000000000000000000
--- a/CMake/cdat_modules/python_pkg.cmake
+++ /dev/null
@@ -1,59 +0,0 @@
-set(PYTHON_MAJOR_SRC 2)
-set(PYTHON_MINOR_SRC 7)
-set(PYTHON_PATCH_SRC 11)
-set(PYTHON_VERSION ${PYTHON_MAJOR_SRC}.${PYTHON_MINOR_SRC}.${PYTHON_PATCH_SRC})
-set(PYTHON_URL ${LLNL_URL})
-set(PYTHON_GZ Python-${PYTHON_VERSION}.tgz)
-set(PYTHON_MD5 6b6076ec9e93f05dd63e47eb9c15728b )
-set(PYVER ${PYTHON_MAJOR_SRC}.${PYTHON_MINOR_SRC})
-set(PYTHON_SOURCE ${PYTHON_URL}/${PYTHON_GZ})
-
-add_cdat_package(Python ${PYTHON_MAJOR_SRC}.${PYTHON_MINOR_SRC}.${PYTHON_PATCH_SRC} "" "")
-
-# FIXME: Name style
-set(CDAT_OS_XTRA_PATH "")
-
-set(PYTHON_SITE_PACKAGES_PREFIX ${CMAKE_INSTALL_PREFIX})
-if (APPLE)
-  set(PYTHON_SITE_PACKAGES_PREFIX ${CMAKE_INSTALL_PREFIX}/Library/Frameworks/Python.framework/Versions/${PYVER})
-endif()
-set(PYTHON_SITE_PACKAGES ${PYTHON_SITE_PACKAGES_PREFIX}/lib/python${PYVER}/site-packages)
-set(PYTHONPATH ${PYTHON_SITE_PACKAGES})
-
-if (CDAT_USE_SYSTEM_PYTHON)
-   find_package(PythonInterp)
-   set(PYVER ${PYTHON_VERSION_MAJOR}.${PYTHON_VERSION_MINOR})
-   # \NOTE This is required, or else FindPythonLibs may find whichever version
-   # is listed first internally, if that version also exists on the system. For
-   # example, a system might have both python 2.6 and 2.7 installed.
-   set(Python_ADDITIONAL_VERSIONS ${PYVER})
-   find_package(PythonLibs)
-   set(PYTHON_SITE_PACKAGES ${CMAKE_INSTALL_PREFIX}/lib/python${PYVER}/site-packages)
-   message("[INFO] Using system python ${PYTHON_EXECUTABLE}")
-   message("[INFO] Putting packages in directory ${PYTHON_SITE_PACKAGES}")
-   set(PYTHON_EXTRA_PREFIX "--prefix=${CMAKE_INSTALL_PREFIX}")
-   message("[INFO] Setting up prefix for installing python packages into: ${PYTHON_EXTRA_PREFIX}")
-   set(ENV{LD_LIBRARY_PATH} $ENV{LD_LIBRARY_PATH})
-   set(PYTHONPATH "${PYTHON_SITE_PACKAGES}:$ENV{PYTHONPATH}")
-   set(ENV{PYTHONPATH} "${PYTHONPATH}")
-   message("[INFO] Set PYTHONPATH to $ENV{PYTHONPATH}")
-   get_filename_component(PYTHON_EXECUTABLE_PATH ${PYTHON_EXECUTABLE} PATH)
-   set(PYTHON_LIBRARY ${PYTHON_LIBRARIES})
-   message("[INFO] set PYTHON_LIBRARY TO" ${PYTHON_LIBRARY})
-   set(PYTHON_INCLUDE ${PYTHON_INCLUDE_DIRS})
-   if(APPLE)
-     set(CDAT_OS_XTRA_PATH ${CMAKE_INSTALL_PREFIX}/Library/Frameworks/Python.framework/Versions/${PYVER}/bin)
-   endif()
-else ()
-   set(PYTHON_EXECUTABLE ${CMAKE_INSTALL_PREFIX}/bin/python)
-   message("[INFO] Building python at ${PYTHON_EXECUTABLE}")
-   set(PYTHON_EXTRA_PREFIX "")
-   set(PYVER 2.7)
-   if (NOT APPLE)
-     set(EASY_INSTALL_BINARY ${CMAKE_INSTALL_PREFIX}/bin/easy_install)
-     set(PIP_BINARY ${CMAKE_INSTALL_PREFIX}/bin/pip)
-   else ()
-     set(EASY_INSTALL_BINARY ${CMAKE_INSTALL_PREFIX}/Library/Frameworks/Python.framework/Versions/${PYVER}/bin/easy_install)
-     set(PIP_BINARY ${CMAKE_INSTALL_PREFIX}/Library/Frameworks/Python.framework/Versions/${PYVER}/bin/pip)
-   endif()
-endif()
diff --git a/CMake/cdat_modules/pyzmq_deps.cmake b/CMake/cdat_modules/pyzmq_deps.cmake
deleted file mode 100644
index 507fc118004ea1e0ead22dd7410094e75e906b5f..0000000000000000000000000000000000000000
--- a/CMake/cdat_modules/pyzmq_deps.cmake
+++ /dev/null
@@ -1 +0,0 @@
-set(pyzmq_deps ${zmq_pkg} ${cython_pkg})
diff --git a/CMake/cdat_modules/pyzmq_external.cmake b/CMake/cdat_modules/pyzmq_external.cmake
deleted file mode 100644
index e931ce77e870cddf322e677e9c4fc7f45a9a035c..0000000000000000000000000000000000000000
--- a/CMake/cdat_modules/pyzmq_external.cmake
+++ /dev/null
@@ -1,50 +0,0 @@
-# The pyzmq project 
-
-set(pyzmq_binary "${CMAKE_CURRENT_BINARY_DIR}/build/pyzmq")
-
-# generate the CMake scripts that drive pyzmq's configure and install steps
-configure_file(
-  ${cdat_CMAKE_SOURCE_DIR}/cdat_modules_extra/pyzmq_configure_step.cmake.in
-  ${cdat_CMAKE_BINARY_DIR}/pyzmq_configure_step.cmake @ONLY)
-
-configure_file(
-  ${cdat_CMAKE_SOURCE_DIR}/cdat_modules_extra/pyzmq_install_step.cmake.in
-  ${cdat_CMAKE_BINARY_DIR}/pyzmq_install_step.cmake @ONLY)
-
-set(pyzmq_CONFIGURE_COMMAND ${CMAKE_COMMAND} -P ${cdat_CMAKE_BINARY_DIR}/pyzmq_configure_step.cmake)
-set(pyzmq_INSTALL_COMMAND ${CMAKE_COMMAND} -P ${cdat_CMAKE_BINARY_DIR}/pyzmq_install_step.cmake)
-
-set(pyzmq_source "${CMAKE_CURRENT_BINARY_DIR}/build/pyzmq")
-
-# create an external project to download pyzmq,
-# and configure and build it
-ExternalProject_Add(pyzmq
-  DOWNLOAD_DIR ${CDAT_PACKAGE_CACHE_DIR}
-  SOURCE_DIR ${pyzmq_source}
-  BINARY_DIR ${CMAKE_CURRENT_BINARY_DIR}/build/pyzmq
-  URL ${PYZMQ_SOURCE}
-  URL_MD5 ${PYZMQ_MD5}
-  CONFIGURE_COMMAND ${pyzmq_CONFIGURE_COMMAND}
-  BUILD_COMMAND ""
-  UPDATE_COMMAND ""
-  INSTALL_COMMAND ${pyzmq_INSTALL_COMMAND}
-  DEPENDS
-    ${pyzmq_deps}
-  ${ep_log_options}
-  )
-
-# pyzmq
-#
-
-#ExternalProject_Add(pyzmq
-#  DOWNLOAD_DIR ${CMAKE_CURRENT_BINARY_DIR}
-#  SOURCE_DIR ${pyzmq_source}
-#  URL ${PYZMQ_URL}/${PYZMQ_GZ}
-#  URL_MD5 ${PYZMQ_MD5}
-#  BUILD_IN_SOURCE 1
-#  CONFIGURE_COMMAND ""
-#  BUILD_COMMAND ${PYTHON_EXECUTABLE} setup.py build
-#  INSTALL_COMMAND ${PYTHON_EXECUTABLE} setup.py install ${PYTHON_EXTRA_PREFIX}
-#  DEPENDS ${pyzmq_deps}
-#  ${ep_log_options}
-#  )
diff --git a/CMake/cdat_modules/pyzmq_pkg.cmake b/CMake/cdat_modules/pyzmq_pkg.cmake
deleted file mode 100644
index dd5f0fa4617a22debfd3afadc2eda9e10221ec18..0000000000000000000000000000000000000000
--- a/CMake/cdat_modules/pyzmq_pkg.cmake
+++ /dev/null
@@ -1,7 +0,0 @@
-set(PYZMQ_VERSION 14.3.1)
-set(PYZMQ_URL ${LLNL_URL})
-set(PYZMQ_GZ pyzmq-${PYZMQ_VERSION}.tar.gz)
-set(PYZMQ_MD5 7196b4a6fbf98022f17ffa924be3d68d)
-set(PYZMQ_SOURCE ${PYZMQ_URL}/${PYZMQ_GZ})
-
-add_cdat_package(pyzmq "" "" OFF)
diff --git a/CMake/cdat_modules/qt4_deps.cmake b/CMake/cdat_modules/qt4_deps.cmake
deleted file mode 100644
index 8b137891791fe96927ad78e64b0aad7bded08bdc..0000000000000000000000000000000000000000
--- a/CMake/cdat_modules/qt4_deps.cmake
+++ /dev/null
@@ -1 +0,0 @@
-
diff --git a/CMake/cdat_modules/qt4_pkg.cmake b/CMake/cdat_modules/qt4_pkg.cmake
deleted file mode 100644
index ee6057e89634b0160101ee7732cda003e8777ae0..0000000000000000000000000000000000000000
--- a/CMake/cdat_modules/qt4_pkg.cmake
+++ /dev/null
@@ -1,10 +0,0 @@
-if (CDAT_BUILD_GRAPHICS)
-  find_package(Qt4 4.7.2 REQUIRED)
-
-  if (CDAT_BUILD_GUI)
-    if (NOT DEFINED QT_QTOPENGL_INCLUDE_DIR)
-      message(FATAL_ERROR "QT_QTOPENGL_INCLUDE_DIR is not set but required")
-    endif()
-  endif()
-endif()
-
diff --git a/CMake/cdat_modules/qt_external.cmake b/CMake/cdat_modules/qt_external.cmake
deleted file mode 100644
index 86085efc6ac72c8dceb97a0c08b0c3f7362392ad..0000000000000000000000000000000000000000
--- a/CMake/cdat_modules/qt_external.cmake
+++ /dev/null
@@ -1,66 +0,0 @@
-
-set(qt_source "${CMAKE_CURRENT_BINARY_DIR}/build/Qt")
-set(qt_install_dir "${cdat_EXTERNALS}")
-
-if(WIN32)
-  # if jom is in the path use it as it will be faster
-  find_program(JOM jom)
-  mark_as_advanced(JOM)
-  if(JOM)
-    set(qt_build_program "${JOM}")
-  else()
-    set(qt_build_program nmake)
-  endif()
-  set(qt_install_dir ${qt_source})
-  configure_file(${Titan_CMAKE_DIR}/win_config_qt.cmake.in
-    ${CMAKE_CURRENT_BINARY_DIR}/win_config_qt.cmake )
-  set(qt_configure ${CMAKE_COMMAND}
-    -P ${CMAKE_CURRENT_BINARY_DIR}/win_config_qt.cmake)
-  set(qt_build ${qt_build_program})
-  set(qt_install "")
-else()
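-  # piping "yes" into configure auto-accepts the open source license prompt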
-  set(qt_configure echo yes | sh configure --prefix=${qt_install_dir} -release
-    -nomake examples -nomake demos -no-audio-backend -no-multimedia 
-    -phonon -opensource)
-  if ("-m32" STREQUAL "${CMAKE_CXX_FLAGS}")
-    set(qt_configure echo yes | sh ./configure -release
-      -nomake examples -nomake demos -no-audio-backend -no-multimedia 
-      --prefix=${qt_install_dir} -opensource
-      -platform linux-g++-32)
-  endif ()
-  set(qt_build ${MAKE})
-  set(qt_install make install)
-  if(APPLE)
-    exec_program(${CMAKE_C_COMPILER} ARGS --version OUTPUT_VARIABLE
-        _gcc_version_info)
-    string (REGEX MATCH "[345]\\.[0-9]\\.[0-9]"
-        _gcc_version "${_gcc_version_info}")
-    if(NOT _gcc_version)
-      string (REGEX REPLACE ".*\\(GCC\\).* ([34]\\.[0-9]) .*" "\\1.0"
-        _gcc_version "${_gcc_version_info}")
-    endif()
-    if(${_gcc_version} VERSION_GREATER 4.2.0)
-      # Then Qt should be built 64 bit
-      message(STATUS "Building 64 bit Qt using cocoa.")
-      set(qt_configure ${qt_configure} -arch x86_64 -cocoa)
-    else()
-      # Then Qt should be built 32 bit
-      message(STATUS "Building 32 bit Qt using carbon.")
-      set(qt_configure ${qt_configure} -arch x86 -carbon)
-    endif()
-  endif()
-endif()
-
-ExternalProject_Add(Qt
-  DOWNLOAD_DIR ${CDAT_PACKAGE_CACHE_DIR}
-  URL ${QT_URL}/${QT_GZ}
-  URL_MD5 ${QT_MD5}
-  SOURCE_DIR ${qt_source}
-  BUILD_IN_SOURCE 1
-  CONFIGURE_COMMAND ${qt_configure}
-  DEPENDS ${Qt_deps}
-  )
-
-set(QT_QMAKE_EXECUTABLE "${qt_install_dir}/bin/qmake"
-    CACHE FILEPATH "Path to qmake executable" FORCE)
-
diff --git a/CMake/cdat_modules/r_deps.cmake b/CMake/cdat_modules/r_deps.cmake
deleted file mode 100644
index a7016962f4a30c2d5f692bf9170b4afd19017d50..0000000000000000000000000000000000000000
--- a/CMake/cdat_modules/r_deps.cmake
+++ /dev/null
@@ -1,4 +0,0 @@
-set(R_deps ${readline_pkg})
-if (CDAT_BUILD_PARALLEL)
-  list(APPEND R_deps ${mpi_pkg})
-endif()
diff --git a/CMake/cdat_modules/r_external.cmake b/CMake/cdat_modules/r_external.cmake
deleted file mode 100644
index af1d2d3111c13112e2f03bd115d1ead982be9344..0000000000000000000000000000000000000000
--- a/CMake/cdat_modules/r_external.cmake
+++ /dev/null
@@ -1,51 +0,0 @@
-
-set(R_source "${CMAKE_CURRENT_BINARY_DIR}/build/R")
-set(R_install "${cdat_EXTERNALS}")
-if (APPLE)
-    message("[INFO] Building R without X support for MacOS")
-    set(WITHX "no")
-    set(WITH_AQUA "yes")
-else ()
-    set(WITHX "yes")
-    set(WITH_AQUA "no")
-endif()
-
-if (CDAT_BUILD_PARALLEL)
-  message("[INFO] Enabling OpenMP for R")
-  set(R_OPENMP "--enable-openmp")
-else ()
-  message("[INFO] Disabling OpenMP for R")
-  set(R_OPENMP "--disable-openmp")
-endif ()
-
-list(APPEND USR_ENVS
-  "CPPFLAGS=-I${cdat_EXTERNALS}/include $ENV{CPPFLAGS}"
-  "LDFLAGS=-L${cdat_EXTERNALS}/lib"
-  )
-ExternalProject_Add(R
-  DOWNLOAD_DIR ${CDAT_PACKAGE_CACHE_DIR}
-  SOURCE_DIR ${R_source}
-  INSTALL_DIR ${R_install}
-  URL ${R_URL}/${R_GZ}
-  URL_MD5 ${R_MD5}
-  BUILD_IN_SOURCE 1
-  PATCH_COMMAND ""
-  DEPENDS ${R_deps}
-  CONFIGURE_COMMAND env ${USR_ENVS} <SOURCE_DIR>/configure --prefix=<INSTALL_DIR> LIBnn=lib --without-jpeglib --disable-R-framework --enable-R-shlib ${R_OPENMP} --without-cairo --without-ICU --without-system-xz --with-aqua=${WITH_AQUA} --without-tcltk --with-x=${WITHX}
-  INSTALL_COMMAND ${CMAKE_MAKE_PROGRAM}  install
-  ${ep_log_options}
-)
-if(APPLE)
-    # change each library's install id, then update dependency references
-    ExternalProject_Add_Step(R InstallNameToolR
-        COMMAND install_name_tool -id ${R_install}/lib/R/lib/libR.dylib ${R_install}/lib/R/lib/libR.dylib
-        COMMAND install_name_tool -id ${R_install}/lib/R/lib/libRblas.dylib ${R_install}/lib/R/lib/libRblas.dylib
-        COMMAND install_name_tool -id ${R_install}/lib/R/lib/libRlapack.dylib ${R_install}/lib/R/lib/libRlapack.dylib
-        COMMAND install_name_tool -change libRblas.dylib ${R_install}/lib/R/lib/libRblas.dylib ${R_install}/lib/R/lib/libR.dylib
-        COMMAND install_name_tool -change libR.dylib ${R_install}/lib/R/lib/libR.dylib -change libRblas.dylib ${R_install}/lib/R/lib/libRblas.dylib ${R_install}/lib/R/lib/libRlapack.dylib
-        DEPENDEES install
-        WORKING_DIRECTORY ${cdat_CMAKE_BINARY_DIR})
-endif(APPLE)
-
-set(R_DIR "${R_binary}" CACHE PATH "R binary directory" FORCE)
-mark_as_advanced(R_DIR)
diff --git a/CMake/cdat_modules/r_pkg.cmake b/CMake/cdat_modules/r_pkg.cmake
deleted file mode 100644
index 8f7e53eb482aac587bc0d4ff4572442a7d52ae72..0000000000000000000000000000000000000000
--- a/CMake/cdat_modules/r_pkg.cmake
+++ /dev/null
@@ -1,13 +0,0 @@
-set(R_MAJOR_SRC 3)
-set(R_MINOR_SRC 2)
-set(R_PATCH_SRC 2)
-set(R_URL ${LLNL_URL})
-set(R_GZ R-${R_MAJOR_SRC}.${R_MINOR_SRC}.${R_PATCH_SRC}.tar.gz)
-set(R_MD5 57cef5c2e210a5454da1979562a10e5b)
-set(R_SOURCE ${R_URL}/${R_GZ})
-
-set (nm R)
-string(TOUPPER ${nm} uc_nm)
-set(${uc_nm}_VERSION ${${nm}_MAJOR_SRC}.${${nm}_MINOR_SRC}.${${nm}_PATCH_SRC})
-add_cdat_package_dependent(R "" "Build R" ${CDAT_BUILD_ALL}
-                           "CDAT_BUILD_GUI" OFF)
diff --git a/CMake/cdat_modules/readline_deps.cmake b/CMake/cdat_modules/readline_deps.cmake
deleted file mode 100644
index e347b6dfb4f001b21d9d3660d807336f2e431a59..0000000000000000000000000000000000000000
--- a/CMake/cdat_modules/readline_deps.cmake
+++ /dev/null
@@ -1 +0,0 @@
-set(readline_deps ${pkgconfig_pkg} ${curses_pkg})
diff --git a/CMake/cdat_modules/readline_external.cmake b/CMake/cdat_modules/readline_external.cmake
deleted file mode 100644
index 212f96171a3578ec5d6911636ac551c4cd9f3fc6..0000000000000000000000000000000000000000
--- a/CMake/cdat_modules/readline_external.cmake
+++ /dev/null
@@ -1,31 +0,0 @@
-set(readline_source "${CMAKE_CURRENT_BINARY_DIR}/build/readline")
-set(readline_install "${cdat_EXTERNALS}")
-set(readline_conf_args)
-
-set(readline_conf_args "--with-curses;--disable-static;--enable-shared")
-# with -fPIC
-IF(UNIX AND NOT WIN32)
-  FIND_PROGRAM(CMAKE_UNAME uname /bin /usr/bin /usr/local/bin )
-  IF(CMAKE_UNAME)
-    EXEC_PROGRAM(uname ARGS -m OUTPUT_VARIABLE CMAKE_SYSTEM_PROCESSOR)
-    SET(CMAKE_SYSTEM_PROCESSOR ${CMAKE_SYSTEM_PROCESSOR} CACHE INTERNAL
-"processor type (i386 and x86_64)")
-    IF(CMAKE_SYSTEM_PROCESSOR MATCHES "x86_64")
-      set(readline_conf_args "CFLAGS=-fPIC" ${readline_conf_args})
-    ENDIF(CMAKE_SYSTEM_PROCESSOR MATCHES "x86_64")
-  ENDIF(CMAKE_UNAME)
-ENDIF(UNIX AND NOT WIN32)
-
-ExternalProject_Add(readline
-  DOWNLOAD_DIR ${CDAT_PACKAGE_CACHE_DIR}
-  SOURCE_DIR ${readline_source}
-  INSTALL_DIR ${readline_install}
-  URL ${READLINE_URL}/${READLINE_GZ}
-  URL_MD5 ${READLINE_MD5}
-  BUILD_IN_SOURCE 1
-  PATCH_COMMAND ${CMAKE_COMMAND} -E copy_if_different ${cdat_external_patch_dir}/src/readline/shobj-conf ${readline_source}/support/shobj-conf
-  CONFIGURE_COMMAND ${CMAKE_COMMAND} -DCONFIGURE_ARGS=${readline_conf_args} -DINSTALL_DIR=<INSTALL_DIR> -DWORKING_DIR=<SOURCE_DIR> -P ${cdat_CMAKE_BINARY_DIR}/cdat_configure_step.cmake
-  DEPENDS ${readline_deps}
-  ${ep_log_options}
-)
-
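The uname -m probe above predates CMake reliably setting CMAKE_SYSTEM_PROCESSOR on its own; on any recent CMake the same x86_64 guard reduces to the equivalent sketch below (not the project's code):

    # CMAKE_SYSTEM_PROCESSOR is populated by CMake during project() setup,
    # so no external uname call is needed to decide whether to add -fPIC.
    if(UNIX AND CMAKE_SYSTEM_PROCESSOR MATCHES "x86_64")
      set(readline_conf_args "CFLAGS=-fPIC" ${readline_conf_args})
    endif()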
diff --git a/CMake/cdat_modules/readline_pkg.cmake b/CMake/cdat_modules/readline_pkg.cmake
deleted file mode 100644
index 86eb2679c723fde7718d3b322c120478320845da..0000000000000000000000000000000000000000
--- a/CMake/cdat_modules/readline_pkg.cmake
+++ /dev/null
@@ -1,11 +0,0 @@
-set(RL_MAJOR 5)
-set(RL_MINOR 2)
-set(RL_MAJOR_SRC 6)
-set(RL_MINOR_SRC 2)
-set(READLINE_URL ${LLNL_URL})
-set(READLINE_GZ readline-${RL_MAJOR_SRC}.${RL_MINOR_SRC}.tar.gz)
-set(READLINE_MD5 67948acb2ca081f23359d0256e9a271c)
-set(READLINE_VERSION ${RL_MAJOR_SRC}.${RL_MINOR_SRC})
-set(READLINE_SOURCE ${READLINE_URL}/${READLINE_GZ})
-
-add_cdat_package(readline "" "" OFF)
diff --git a/CMake/cdat_modules/rpy2_deps.cmake b/CMake/cdat_modules/rpy2_deps.cmake
deleted file mode 100644
index 3c3d4d90c6f5495a3c65ffe215fa45671f4fcdb6..0000000000000000000000000000000000000000
--- a/CMake/cdat_modules/rpy2_deps.cmake
+++ /dev/null
@@ -1 +0,0 @@
-set(RPY2_deps ${python_pkg} ${pip_pkg} ${r_pkg} ${six_pkg} ${singledispatch_pkg} ${windfield_pkg})
diff --git a/CMake/cdat_modules/rpy2_external.cmake b/CMake/cdat_modules/rpy2_external.cmake
deleted file mode 100644
index d408ae22cde6a2b0d7616698e2eed7ecebbee0a0..0000000000000000000000000000000000000000
--- a/CMake/cdat_modules/rpy2_external.cmake
+++ /dev/null
@@ -1,11 +0,0 @@
-# create an external project to install RPY2,
-# and configure and build it
-set(nm RPY2)
-
-# Set PATH and R_HOME to find R
-list(APPEND USR_ENVS
-  "R_HOME=${cdat_EXTERNALS}/lib/R"
-  "PATH=${cdat_EXTERNALS}/bin:$ENV{PATH}"
-  )
-
-include(pipinstaller)
diff --git a/CMake/cdat_modules/rpy2_pkg.cmake b/CMake/cdat_modules/rpy2_pkg.cmake
deleted file mode 100644
index 5447bd3fb67957b825f6f43902eb2cd796fed8a9..0000000000000000000000000000000000000000
--- a/CMake/cdat_modules/rpy2_pkg.cmake
+++ /dev/null
@@ -1,11 +0,0 @@
-set(RPY2_MAJOR_SRC 2)
-set(RPY2_MINOR_SRC 6)
-set(RPY2_PATCH_SRC 0)
-
-set(RPY2_VERSION ${RPY2_MAJOR_SRC}.${RPY2_MINOR_SRC}.${RPY2_PATCH_SRC})
-set(RPY2_GZ rpy2-${RPY2_VERSION}.tar.gz)
-set(RPY2_SOURCE ${LLNL_URL}/${RPY2_GZ})
-set(RPY2_MD5 679898fbc832d4f05a5efcf1a7eb1a68)
-
-add_cdat_package_dependent(RPY2 "" "" ${CDAT_BUILD_ALL}
-                           "NOT CDAT_BUILD_LEAN" OFF)
diff --git a/CMake/cdat_modules/sampledata_deps.cmake b/CMake/cdat_modules/sampledata_deps.cmake
deleted file mode 100644
index 785ca373e155652b0ffdd71797920b3f18b7c478..0000000000000000000000000000000000000000
--- a/CMake/cdat_modules/sampledata_deps.cmake
+++ /dev/null
@@ -1 +0,0 @@
-set(sampledata_deps ${cdat_pkg})
diff --git a/CMake/cdat_modules/sampledata_external.cmake b/CMake/cdat_modules/sampledata_external.cmake
deleted file mode 100644
index 7b9a7027fd58fc2dc989c0a25ca4d693dda24a96..0000000000000000000000000000000000000000
--- a/CMake/cdat_modules/sampledata_external.cmake
+++ /dev/null
@@ -1,17 +0,0 @@
-if (CDAT_DOWNLOAD_SAMPLE_DATA)
-  message("[INFO] ------------------------------------------------------------------------------------------------------------------------------")
-  configure_file(
-    "${cdat_CMAKE_SOURCE_DIR}/cdat_modules_extra/cdat_download_sample_data.cmake.in"
-    "${cdat_CMAKE_BINARY_DIR}/cdat_download_sample_data.cmake"
-    @ONLY
-    )
-  set(sampledata_cmd ${CMAKE_COMMAND} -P ${cdat_CMAKE_BINARY_DIR}/cdat_download_sample_data.cmake)
-  ExternalProject_Add(sampledata
-    SOURCE_DIR ${cdat_SOURCE_DIR}/Packages/dat
-    CONFIGURE_COMMAND ${sampledata_cmd}
-    BUILD_COMMAND ""
-    INSTALL_COMMAND ""
-    DEPENDS ${sampledata_deps}
-    ${ep_log_options}
-    )
-endif()
diff --git a/CMake/cdat_modules/sampledata_pkg.cmake b/CMake/cdat_modules/sampledata_pkg.cmake
deleted file mode 100644
index 821414e964f6de5b0ffbffac79033ddbc171fe8d..0000000000000000000000000000000000000000
--- a/CMake/cdat_modules/sampledata_pkg.cmake
+++ /dev/null
@@ -1,2 +0,0 @@
-
-add_cdat_package(sampledata "" "" ON)
diff --git a/CMake/cdat_modules/scientificpython_deps.cmake b/CMake/cdat_modules/scientificpython_deps.cmake
deleted file mode 100644
index 8116fccd08fe0273552ca2657ca1bb235045d547..0000000000000000000000000000000000000000
--- a/CMake/cdat_modules/scientificpython_deps.cmake
+++ /dev/null
@@ -1,2 +0,0 @@
-set(scientificpython_deps ${numpy_pkg} ${netcdf_pkg} ${cdat_pkg} ${pip_pkg})
-
diff --git a/CMake/cdat_modules/scientificpython_external.cmake b/CMake/cdat_modules/scientificpython_external.cmake
deleted file mode 100644
index ecd5c2c9a34eeadd1ddb25ca5b8bb97e41a97606..0000000000000000000000000000000000000000
--- a/CMake/cdat_modules/scientificpython_external.cmake
+++ /dev/null
@@ -1,5 +0,0 @@
-# create an external project to install ScientificPython
-# and configure and build it
-set(nm scientificpython)
-
-include(pipinstaller)
diff --git a/CMake/cdat_modules/scientificpython_pkg.cmake b/CMake/cdat_modules/scientificpython_pkg.cmake
deleted file mode 100644
index 206cdd7ca87664796913fbac0fff6bdabcf9f0cd..0000000000000000000000000000000000000000
--- a/CMake/cdat_modules/scientificpython_pkg.cmake
+++ /dev/null
@@ -1,7 +0,0 @@
-set(SCIENTIFICPYTHON_VERSION 2.8)
-set(SCIENTIFICPYTHON_URL ${LLNL_URL})
-set(SCIENTIFICPYTHON_GZ ScientificPython-${SCIENTIFICPYTHON_VERSION}.tar.gz)
-set(SCIENTIFICPYTHON_SOURCE ${SCIENTIFICPYTHON_URL}/${SCIENTIFICPYTHON_GZ})
-set(SCIENTIFICPYTHON_MD5 b87dd2b2c4be6b5421d906d39bcc59a7 )
-
-add_cdat_package_dependent(scientificpython "" "" OFF "CDAT_BUILD_LEAN" OFF)
diff --git a/CMake/cdat_modules/scikits_deps.cmake b/CMake/cdat_modules/scikits_deps.cmake
deleted file mode 100644
index 858e900f72e7fe68e9b212c70ebbe2c36f181ed3..0000000000000000000000000000000000000000
--- a/CMake/cdat_modules/scikits_deps.cmake
+++ /dev/null
@@ -1 +0,0 @@
-set(scikits_deps ${pip_pkg} ${scipy_pkg})
diff --git a/CMake/cdat_modules/scikits_external.cmake b/CMake/cdat_modules/scikits_external.cmake
deleted file mode 100644
index eeff0fa0135a7c7d185bfe4e9056d2dbd3bb15ae..0000000000000000000000000000000000000000
--- a/CMake/cdat_modules/scikits_external.cmake
+++ /dev/null
@@ -1,5 +0,0 @@
-# create an external project to install scikits,
-# and configure and build it
-set(nm scikits)
-
-include(pipinstaller)
diff --git a/CMake/cdat_modules/scikits_pkg.cmake b/CMake/cdat_modules/scikits_pkg.cmake
deleted file mode 100644
index 83d79a2500760a58bf29873dd32629874b4238ec..0000000000000000000000000000000000000000
--- a/CMake/cdat_modules/scikits_pkg.cmake
+++ /dev/null
@@ -1,10 +0,0 @@
-set(SCIKITS_MAJOR_SRC 0)
-set(SCIKITS_MINOR_SRC 12)
-set(SCIKITS_URL ${LLNL_URL})
-set(SCIKITS_GZ scikit-learn-${SCIKITS_MAJOR_SRC}.${SCIKITS_MINOR_SRC}.tar.gz)
-set(SCIKITS_MD5 0e1f6c60b43a4f447bf363583c1fc204 )
-set(SCIKITS_VERSION ${SCIKITS_MAJOR_SRC}.${SCIKITS_MINOR_SRC})
-set(SCIKITS_SOURCE ${SCIKITS_URL}/${SCIKITS_GZ})
-
-
-add_cdat_package_dependent(scikits "" "" OFF "CDAT_BUILD_LEAN" OFF)
diff --git a/CMake/cdat_modules/scipy_deps.cmake b/CMake/cdat_modules/scipy_deps.cmake
deleted file mode 100644
index f7ca69d033412c709eb2bb7d20a3b0e7b1e159f8..0000000000000000000000000000000000000000
--- a/CMake/cdat_modules/scipy_deps.cmake
+++ /dev/null
@@ -1 +0,0 @@
-set(SCIPY_deps ${numpy_pkg} ${cython_pkg})
diff --git a/CMake/cdat_modules/scipy_external.cmake b/CMake/cdat_modules/scipy_external.cmake
deleted file mode 100644
index ebd0ca9e791eb20fedd83a110743287afe39e099..0000000000000000000000000000000000000000
--- a/CMake/cdat_modules/scipy_external.cmake
+++ /dev/null
@@ -1,42 +0,0 @@
-# The Scipy external project 
-
-set(SCIPY_binary "${CMAKE_CURRENT_BINARY_DIR}/build/SCIPY")
-
-# to configure scipy we run a cmake -P script
-# the script will create a site.cfg file
-# then run python setup.py config to verify setup
-configure_file(
-  ${cdat_CMAKE_SOURCE_DIR}/cdat_modules_extra/SCIPY_configure_step.cmake.in
-  ${cdat_CMAKE_BINARY_DIR}/SCIPY_configure_step.cmake @ONLY)
-# to build scipy we also run a cmake -P script.
-# the script will set LD_LIBRARY_PATH so that 
-# python can run after it is built on linux
-configure_file(
-  ${cdat_CMAKE_SOURCE_DIR}/cdat_modules_extra/SCIPY_make_step.cmake.in
-  ${cdat_CMAKE_BINARY_DIR}/SCIPY_make_step.cmake @ONLY)
-
-configure_file(
-  ${cdat_CMAKE_SOURCE_DIR}/cdat_modules_extra/SCIPY_install_step.cmake.in
-  ${cdat_CMAKE_BINARY_DIR}/SCIPY_install_step.cmake @ONLY)
-
-set(SCIPY_CONFIGURE_COMMAND ${CMAKE_COMMAND}
-    -DCONFIG_TYPE=${CMAKE_CFG_INTDIR} -DCDAT_USE_SYSTEM_LAPACK:STRING=${CDAT_USE_SYSTEM_LAPACK} -DLAPACK_LIBRARIES:STRING=${LAPACK_LIBRARIES} -DBLAS_LIBRARIES:STRING=${BLAS_LIBRARIES} -P ${cdat_CMAKE_BINARY_DIR}/SCIPY_configure_step.cmake)
-set(SCIPY_BUILD_COMMAND ${CMAKE_COMMAND} -P ${cdat_CMAKE_BINARY_DIR}/SCIPY_make_step.cmake)
-set(SCIPY_INSTALL_COMMAND ${CMAKE_COMMAND} -P ${cdat_CMAKE_BINARY_DIR}/SCIPY_install_step.cmake)
-
-# create an external project to download scipy,
-# and configure and build it
-ExternalProject_Add(SCIPY
-  URL ${SCIPY_URL}/${SCIPY_GZ}
-  URL_MD5 ${SCIPY_MD5}
-  DOWNLOAD_DIR ${CDAT_PACKAGE_CACHE_DIR}
-  SOURCE_DIR ${SCIPY_binary}
-  BINARY_DIR ${SCIPY_binary}
-  CONFIGURE_COMMAND ${SCIPY_CONFIGURE_COMMAND}
-  BUILD_COMMAND ${SCIPY_BUILD_COMMAND}
-  UPDATE_COMMAND ""
-  INSTALL_COMMAND ${SCIPY_INSTALL_COMMAND}
-  DEPENDS
-    ${SCIPY_deps}
-  ${ep_log_options}
-)
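Each configure_file(... @ONLY) call above turns a *.cmake.in template into a script that is later executed with cmake -P. The templates themselves are not part of this diff; a hypothetical make-step template matching the comments (set LD_LIBRARY_PATH, then drive setup.py) could look like:

    # SCIPY_make_step.cmake.in -- hypothetical sketch; the @...@ tokens are
    # substituted by configure_file at configure time.
    set(ENV{LD_LIBRARY_PATH} "@CMAKE_INSTALL_PREFIX@/lib:$ENV{LD_LIBRARY_PATH}")
    execute_process(
      COMMAND "@PYTHON_EXECUTABLE@" setup.py build
      WORKING_DIRECTORY "@SCIPY_binary@"
      RESULT_VARIABLE _res
      )
    if(NOT _res EQUAL 0)
      message(FATAL_ERROR "scipy build step failed")
    endif()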
diff --git a/CMake/cdat_modules/scipy_pkg.cmake b/CMake/cdat_modules/scipy_pkg.cmake
deleted file mode 100644
index e582aecb6fd6abd9da26d1a7deca337a47f7e82c..0000000000000000000000000000000000000000
--- a/CMake/cdat_modules/scipy_pkg.cmake
+++ /dev/null
@@ -1,21 +0,0 @@
-set(SCIPY_MAJOR 0)
-set(SCIPY_MINOR 17)
-set(SCIPY_PATCH 0)
-set(SCIPY_MAJOR_SRC 0)
-set(SCIPY_MINOR_SRC 17)
-set(SCIPY_PATCH_SRC 0)
-set(SCIPY_URL ${LLNL_URL})
-set(SCIPY_GZ scipy-${SCIPY_MAJOR_SRC}.${SCIPY_MINOR_SRC}.${SCIPY_PATCH_SRC}.tar.gz)
-set(SCIPY_MD5 298ca04ade82814b17f5cd2d9d4c7b70)
-set(SCIPY_SOURCE ${SCIPY_URL}/${SCIPY_GZ})
-
-set (nm SCIPY)
-string(TOUPPER ${nm} uc_nm)
-set(${uc_nm}_VERSION ${${nm}_MAJOR_SRC}.${${nm}_MINOR_SRC}.${${nm}_PATCH_SRC})
-
-add_cdat_package_dependent(SCIPY "" "" OFF "CDAT_BUILD_LEAN" ON)
-#if (CDAT_BUILD_ALL)
-#  add_cdat_package(scipy "" "" ON)
-#else()
-#  add_cdat_package(scipy "" "" OFF)
-#endif()
diff --git a/CMake/cdat_modules/seawater_deps.cmake b/CMake/cdat_modules/seawater_deps.cmake
deleted file mode 100644
index d8ca102702a7df0d2a8b5f841e92c474267126c5..0000000000000000000000000000000000000000
--- a/CMake/cdat_modules/seawater_deps.cmake
+++ /dev/null
@@ -1 +0,0 @@
-set(seawater_deps ${python_pkg} ${numpy_pkg})
diff --git a/CMake/cdat_modules/seawater_external.cmake b/CMake/cdat_modules/seawater_external.cmake
deleted file mode 100644
index a92c31447afe4cbe736bc0d041b446868404c158..0000000000000000000000000000000000000000
--- a/CMake/cdat_modules/seawater_external.cmake
+++ /dev/null
@@ -1,24 +0,0 @@
-# seawater
-#
-set(seawater_source_dir "${CMAKE_CURRENT_BINARY_DIR}/build/seawater")
-
-configure_file(
-  "${cdat_CMAKE_SOURCE_DIR}/cdat_modules_extra/seawater_build_step.cmake.in"
-  "${cdat_CMAKE_BINARY_DIR}/seawater_build_step.cmake"
-  @ONLY
-)
-
-set(seawater_build_command ${CMAKE_COMMAND} -P ${cdat_CMAKE_BINARY_DIR}/seawater_build_step.cmake)
-
-ExternalProject_Add(seawater
-  DOWNLOAD_DIR ${CDAT_PACKAGE_CACHE_DIR}
-  SOURCE_DIR ${seawater_source_dir}
-  URL ${SEAWATER_URL}/${SEAWATER_GZ}
-  URL_MD5 ${SEAWATER_MD5}
-  BUILD_IN_SOURCE 1
-  CONFIGURE_COMMAND ""
-  BUILD_COMMAND ${seawater_build_command}
-  INSTALL_COMMAND ""
-  DEPENDS ${seawater_deps}
-  ${ep_log_options}
-)
diff --git a/CMake/cdat_modules/seawater_pkg.cmake b/CMake/cdat_modules/seawater_pkg.cmake
deleted file mode 100644
index 81bde3ba704086b6cc78a6c42ed4986a9784cfc9..0000000000000000000000000000000000000000
--- a/CMake/cdat_modules/seawater_pkg.cmake
+++ /dev/null
@@ -1,18 +0,0 @@
-set(SEAWATER_MAJOR 3)
-set(SEAWATER_MINOR 3)
-set(SEAWATER_PATCH 4)
-set(SEAWATER_VERSION ${SEAWATER_MAJOR}.${SEAWATER_MINOR}.${SEAWATER_PATCH})
-set(SEAWATER_URL ${LLNL_URL})
-set(SEAWATER_GZ python-seawater-${SEAWATER_VERSION}.tar.gz)
-set(SEAWATER_MD5 0932193350f42c055e7f523578ec1b7c)
-
-set (nm SEAWATER)
-string(TOUPPER ${nm} uc_nm)
-set(${uc_nm}_VERSION ${${nm}_MAJOR}.${${nm}_MINOR}.${${nm}_PATCH})
-set(SEAWATER_SOURCE ${SEAWATER_URL}/${SEAWATER_GZ})
-
-if (CDAT_BUILD_ALL)
-  add_cdat_package(seawater "" "" ON)
-else()
-  add_cdat_package(seawater "" "" OFF)
-endif()
diff --git a/CMake/cdat_modules/setuptools_deps.cmake b/CMake/cdat_modules/setuptools_deps.cmake
deleted file mode 100644
index 9e3879e6b4f60c532fc63a36c78e88750aab6d99..0000000000000000000000000000000000000000
--- a/CMake/cdat_modules/setuptools_deps.cmake
+++ /dev/null
@@ -1 +0,0 @@
-set(setuptools_deps ${pkgconfig_pkg} ${python_pkg})
diff --git a/CMake/cdat_modules/setuptools_external.cmake b/CMake/cdat_modules/setuptools_external.cmake
deleted file mode 100644
index cbea071a40a24c98eb9cf4dd94d33ad5ae55e9fc..0000000000000000000000000000000000000000
--- a/CMake/cdat_modules/setuptools_external.cmake
+++ /dev/null
@@ -1,38 +0,0 @@
-set(setuptools_source "${CMAKE_CURRENT_BINARY_DIR}/build/setuptools")
-set(setuptools_install "${cdat_EXTERNALS}")
-
-# 2012-03-19 C. Doutriaux: commented this out; it could not pick up PYTHONPATH
-# and LD_LIBRARY_PATH, and seems far too complicated for what is really needed.
-#configure_file(${cdat_CMAKE_SOURCE_DIR}/cdat_modules_extra/setuptools_make_step.cmake.in
-#  ${cdat_CMAKE_BINARY_DIR}/setuptools_make_step.cmake
-#  @ONLY)
-
-#configure_file(${cdat_CMAKE_SOURCE_DIR}/cdat_modules_extra/setuptools_install_step.cmake.in
-#  ${cdat_CMAKE_BINARY_DIR}/setuptools_install_step.cmake
-#  @ONLY)
-
-#set(setuptools_build_command ${CMAKE_COMMAND} -P ${cdat_CMAKE_BINARY_DIR}/setuptools_make_step.cmake)
-#set(setuptools_install_command ${CMAKE_COMMAND} -P ${cdat_CMAKE_BINARY_DIR}/setuptools_install_step.cmake)
-  
-
-# old command
-#  BUILD_COMMAND 
-#  INSTALL_COMMAND ${setuptools_install_command}
-
-ExternalProject_Add(setuptools
-  DOWNLOAD_DIR ${CDAT_PACKAGE_CACHE_DIR}
-  SOURCE_DIR ${setuptools_source}
-  INSTALL_DIR ${setuptools_install}
-  URL ${SETUPTOOLS_URL}/${SETUPTOOLS_GZ}
-  URL_MD5 ${SETUPTOOLS_MD5}
-  BUILD_IN_SOURCE 1
-  PATCH_COMMAND ""
-  CONFIGURE_COMMAND ""
-  BUILD_COMMAND env PYTHONPATH=$ENV{PYTHONPATH} LD_LIBRARY_PATH=$ENV{LD_LIBRARY_PATH} ${PYTHON_EXECUTABLE} setup.py build
-  INSTALL_COMMAND env PYTHONPATH=$ENV{PYTHONPATH} LD_LIBRARY_PATH=$ENV{LD_LIBRARY_PATH} ${PYTHON_EXECUTABLE} setup.py install ${PYTHON_EXTRA_PREFIX}
-  DEPENDS ${setuptools_deps}
-  ${ep_log_options}
-)
-
-set(setuptools_DIR "${setuptools_binary}" CACHE PATH "setuptools binary directory" FORCE)
-mark_as_advanced(setuptools_DIR)
diff --git a/CMake/cdat_modules/setuptools_pkg.cmake b/CMake/cdat_modules/setuptools_pkg.cmake
deleted file mode 100644
index 97c8e93f7bc823d7bd6b00a4172f70561bdb9500..0000000000000000000000000000000000000000
--- a/CMake/cdat_modules/setuptools_pkg.cmake
+++ /dev/null
@@ -1,10 +0,0 @@
-set(SETUPTOOLS_MAJOR_SRC 19)
-set(SETUPTOOLS_MINOR_SRC 2)
-set(SETUPTOOLS_PATCH_SRC '')
-set(SETUPTOOLS_URL ${LLNL_URL})
-set(SETUPTOOLS_GZ setuptools-${SETUPTOOLS_MAJOR_SRC}.${SETUPTOOLS_MINOR_SRC}.tar.gz)
-set(SETUPTOOLS_MD5 78353b1f80375ca5e088f4b4627ffe03)
-set(SETUPTOOLS_VERSION ${SETUPTOOLS_MAJOR_SRC}.${SETUPTOOLS_MINOR_SRC})
-set(SETUPTOOLS_SOURCE ${SETUPTOOLS_URL}/${SETUPTOOLS_GZ})
-
-add_cdat_package(setuptools "" "" OFF)
diff --git a/CMake/cdat_modules/shapely_deps.cmake b/CMake/cdat_modules/shapely_deps.cmake
deleted file mode 100644
index e4cf1bcff10fabcc20e0b6bdb0852fce9b7a7852..0000000000000000000000000000000000000000
--- a/CMake/cdat_modules/shapely_deps.cmake
+++ /dev/null
@@ -1 +0,0 @@
-set(Shapely_deps ${pkgconfig_pkg} ${pip_pkg} ${python_pkg} ${geos_pkg})
diff --git a/CMake/cdat_modules/shapely_external.cmake b/CMake/cdat_modules/shapely_external.cmake
deleted file mode 100644
index a04192050ec60ad2d9f87700d46db67fce88174d..0000000000000000000000000000000000000000
--- a/CMake/cdat_modules/shapely_external.cmake
+++ /dev/null
@@ -1,5 +0,0 @@
-# create an external project to install Shapely,
-# and configure and build it
-set(nm Shapely)
-set(USR_ENVS "GEOS_CONFIG=${cdat_EXTERNALS}/bin/geos-config")
-include(pipinstaller)
diff --git a/CMake/cdat_modules/shapely_pkg.cmake b/CMake/cdat_modules/shapely_pkg.cmake
deleted file mode 100644
index 1155206523fa389760d62a6f9def6b886b88f83d..0000000000000000000000000000000000000000
--- a/CMake/cdat_modules/shapely_pkg.cmake
+++ /dev/null
@@ -1,17 +0,0 @@
-set( SHAPELY_MAJOR_SRC 1  )
-set( SHAPELY_MINOR_SRC 5 )
-set( SHAPELY_PATCH_SRC 13  )
-set(SHAPELY_URL ${LLNL_URL})
-set(SHAPELY_GZ
-    Shapely-${SHAPELY_MAJOR_SRC}.${SHAPELY_MINOR_SRC}.${SHAPELY_PATCH_SRC}.tar.gz)
-set(SHAPELY_MD5 5ee549862ae84326f5f5525bbd0b8a50)
-set(SHAPELY_SOURCE ${SHAPELY_URL}/${SHAPELY_GZ})
-
-set (nm SHAPELY)
-string(TOUPPER ${nm} uc_nm)
-set(${uc_nm}_VERSION ${${nm}_MAJOR_SRC}.${${nm}_MINOR_SRC}.${${nm}_PATCH_SRC})
-if (CDAT_BUILD_ALL)
-  add_cdat_package(Shapely "" "" ON)
-else()
-  add_cdat_package(Shapely "" "" OFF)
-endif()
diff --git a/CMake/cdat_modules/singledispatch_deps.cmake b/CMake/cdat_modules/singledispatch_deps.cmake
deleted file mode 100644
index 5ad0c5ed4fcb4a273802c52824624b41703d0613..0000000000000000000000000000000000000000
--- a/CMake/cdat_modules/singledispatch_deps.cmake
+++ /dev/null
@@ -1 +0,0 @@
-set(singledispatch_deps ${python_pkg} ${setuptools_pkg} ${six_pkg})
diff --git a/CMake/cdat_modules/singledispatch_external.cmake b/CMake/cdat_modules/singledispatch_external.cmake
deleted file mode 100644
index 893edf6ae1935deac1d7e753a37c56b053666d80..0000000000000000000000000000000000000000
--- a/CMake/cdat_modules/singledispatch_external.cmake
+++ /dev/null
@@ -1,16 +0,0 @@
-# The singledispatch project
-
-set(singledispatch_binary "${CMAKE_CURRENT_BINARY_DIR}/build/singledispatch")
-
-ExternalProject_Add(singledispatch
-  DOWNLOAD_DIR ${CMAKE_CURRENT_BINARY_DIR}
-  SOURCE_DIR ${singledispatch_binary}
-  URL ${SINGLEDISPATCH_SOURCE}
-  URL_MD5 ${SINGLEDISPATCH_MD5}
-  BUILD_IN_SOURCE 1
-  CONFIGURE_COMMAND ""
-  BUILD_COMMAND ${PYTHON_EXECUTABLE} setup.py build
-  INSTALL_COMMAND ${PYTHON_EXECUTABLE} setup.py install ${PYTHON_EXTRA_PREFIX}
-  DEPENDS ${singledispatch_deps}
-  ${ep_log_options}
-  )
diff --git a/CMake/cdat_modules/singledispatch_pkg.cmake b/CMake/cdat_modules/singledispatch_pkg.cmake
deleted file mode 100644
index c5eb273acb633eb853c1b553a762aa68f002b20a..0000000000000000000000000000000000000000
--- a/CMake/cdat_modules/singledispatch_pkg.cmake
+++ /dev/null
@@ -1,16 +0,0 @@
-set( SINGLEDISPATCH_MAJOR 3 )
-set( SINGLEDISPATCH_MINOR 4 )
-set( SINGLEDISPATCH_PATCH 0.3)
-set( SINGLEDISPATCH_VERSION ${SINGLEDISPATCH_MAJOR}.${SINGLEDISPATCH_MINOR}.${SINGLEDISPATCH_PATCH} )
-set( SINGLEDISPATCH_URL ${LLNL_URL} )
-set( SINGLEDISPATCH_GZ singledispatch-${SINGLEDISPATCH_VERSION}.tar.gz )
-set( SINGLEDISPATCH_MD5 af2fc6a3d6cc5a02d0bf54d909785fcb )
-
-set (nm SINGLEDISPATCH)
-string(TOUPPER ${nm} uc_nm)
-set(${uc_nm}_VERSION ${${nm}_MAJOR}.${${nm}_MINOR}.${${nm}_PATCH})
-set(SINGLEDISPATCH_SOURCE ${SINGLEDISPATCH_URL}/${SINGLEDISPATCH_GZ})
-
-if (BUILD_TESTING)
-  add_cdat_package(singledispatch "" "" ON)
-endif()
diff --git a/CMake/cdat_modules/sip_deps.cmake b/CMake/cdat_modules/sip_deps.cmake
deleted file mode 100644
index ee888d43540eb24b19ae968c0872e83a356bf1ff..0000000000000000000000000000000000000000
--- a/CMake/cdat_modules/sip_deps.cmake
+++ /dev/null
@@ -1 +0,0 @@
-set(SIP_deps ${pkgconfig_pkg} ${python_pkg})
diff --git a/CMake/cdat_modules/sip_external.cmake b/CMake/cdat_modules/sip_external.cmake
deleted file mode 100644
index bbf4f461ff7cec29682e3b9af0307ccff085ca04..0000000000000000000000000000000000000000
--- a/CMake/cdat_modules/sip_external.cmake
+++ /dev/null
@@ -1,13 +0,0 @@
-set(SIP_configure_command ${PYTHON_EXECUTABLE} ${cdat_BINARY_DIR}/build/SIP/configure.py -b ${CMAKE_INSTALL_PREFIX}/bin -d ${PYTHON_SITE_PACKAGES} -e ${CMAKE_INSTALL_PREFIX}/include -v ${CMAKE_INSTALL_PREFIX}/share CC=${CMAKE_C_COMPILER} CXX=${CMAKE_CXX_COMPILER})
-
-ExternalProject_Add(SIP
-  DOWNLOAD_DIR ${CDAT_PACKAGE_CACHE_DIR}
-  URL ${SIP_URL}/${SIP_GZ}
-  URL_MD5 ${SIP_MD5}
-  SOURCE_DIR ${cdat_BINARY_DIR}/build/SIP
-  BUILD_IN_SOURCE 1
-  CONFIGURE_COMMAND ${SIP_configure_command}
-  DEPENDS ${SIP_deps}
-  ${ep_log_options}
-)
-
diff --git a/CMake/cdat_modules/sip_pkg.cmake b/CMake/cdat_modules/sip_pkg.cmake
deleted file mode 100644
index c2beefbd3b9fd77240886c3e22e0c65aad13e421..0000000000000000000000000000000000000000
--- a/CMake/cdat_modules/sip_pkg.cmake
+++ /dev/null
@@ -1,14 +0,0 @@
-set(SIP_MAJOR 4)
-set(SIP_MINOR 12)
-set(SIP_PATCH 1)
-set(SIP_MAJOR_SRC 4)
-set(SIP_MINOR_SRC 16)
-set(SIP_PATCH_SRC 4)
-set(SIP_VERSION ${SIP_MAJOR_SRC}.${SIP_MINOR_SRC}.${SIP_PATCH_SRC})
-set(SIP_URL http://www.riverbankcomputing.com/static/Downloads/sip${SIP_MAJOR_SRC})
-set(SIP_URL ${LLNL_URL})
-set(SIP_GZ sip-${SIP_MAJOR_SRC}.${SIP_MINOR_SRC}.${SIP_PATCH_SRC}.tar.gz)
-set(SIP_MD5 a9840670a064dbf8f63a8f653776fec9 )
-set(SIP_SOURCE ${SIP_URL}/${SIP_GZ})
-
-add_cdat_package_dependent(SIP "" "" OFF "CDAT_BUILD_GUI" OFF)
diff --git a/CMake/cdat_modules/six_deps.cmake b/CMake/cdat_modules/six_deps.cmake
deleted file mode 100644
index 20fb4f54fdd10d8a9480d9d5420a2549bb1e836e..0000000000000000000000000000000000000000
--- a/CMake/cdat_modules/six_deps.cmake
+++ /dev/null
@@ -1 +0,0 @@
-set(SIX_deps ${python_pkg} ${pip_pkg})
diff --git a/CMake/cdat_modules/six_external.cmake b/CMake/cdat_modules/six_external.cmake
deleted file mode 100644
index 5a1ae27de42690898a778f8fc03498a9a751cf39..0000000000000000000000000000000000000000
--- a/CMake/cdat_modules/six_external.cmake
+++ /dev/null
@@ -1,6 +0,0 @@
-
-# create an external project to install SIX,
-# and configure and build it
-set(nm SIX)
-
-include(pipinstaller)
diff --git a/CMake/cdat_modules/six_pkg.cmake b/CMake/cdat_modules/six_pkg.cmake
deleted file mode 100644
index e8daac58a62e774f1d1812a1a1d1079ab884f0cc..0000000000000000000000000000000000000000
--- a/CMake/cdat_modules/six_pkg.cmake
+++ /dev/null
@@ -1,10 +0,0 @@
-set(SIX_MAJOR_SRC 1)
-set(SIX_MINOR_SRC 9)
-set(SIX_PATCH_SRC 0)
-
-set(SIX_VERSION ${SIX_MAJOR_SRC}.${SIX_MINOR_SRC}.${SIX_PATCH_SRC})
-set(SIX_GZ six-${SIX_VERSION}.tar.gz)
-set(SIX_SOURCE ${LLNL_URL}/${SIX_GZ})
-set(SIX_MD5 476881ef4012262dfc8adc645ee786c4)
-
-add_cdat_package_dependent(SIX "" "" ON "CDAT_BUILD_LEAN" OFF)
diff --git a/CMake/cdat_modules/sphinx_deps.cmake b/CMake/cdat_modules/sphinx_deps.cmake
deleted file mode 100644
index 8e0e9f2a19d7d86e4bcda6627e0c5163a1e23708..0000000000000000000000000000000000000000
--- a/CMake/cdat_modules/sphinx_deps.cmake
+++ /dev/null
@@ -1 +0,0 @@
-set(Sphinx_deps ${pip_pkg} ${docutils_pkg})
diff --git a/CMake/cdat_modules/sphinx_external.cmake b/CMake/cdat_modules/sphinx_external.cmake
deleted file mode 100644
index 41cf3d2c1feb91504c241fbed63b8001a33bf2d4..0000000000000000000000000000000000000000
--- a/CMake/cdat_modules/sphinx_external.cmake
+++ /dev/null
@@ -1,5 +0,0 @@
-# create an external project to install Sphinx,
-# and configure and build it
-set(nm Sphinx)
-
-include(pipinstaller)
diff --git a/CMake/cdat_modules/sphinx_pkg.cmake b/CMake/cdat_modules/sphinx_pkg.cmake
deleted file mode 100644
index 536d6e042e6a982bb2443aaac28b0eab95e72974..0000000000000000000000000000000000000000
--- a/CMake/cdat_modules/sphinx_pkg.cmake
+++ /dev/null
@@ -1,14 +0,0 @@
-set(SPHINX_MAJOR_SRC 1)
-set(SPHINX_MINOR_SRC 2)
-set(SPHINX_PATCH_SRC 2)
-
-set (nm SPHINX)
-string(TOUPPER ${nm} uc_nm)
-set(${uc_nm}_VERSION ${${nm}_MAJOR_SRC}.${${nm}_MINOR_SRC}.${${nm}_PATCH_SRC})
-set(SPHINX_URL ${LLNL_URL})
-set(SPHINX_GZ Sphinx-${SPHINX_VERSION}.tar.gz)
-set(SPHINX_SOURCE ${SPHINX_URL}/${SPHINX_GZ})
-set(SPHINX_MD5 3dc73ccaa8d0bfb2d62fb671b1f7e8a4)
-
-add_cdat_package_dependent(Sphinx "" "" OFF "CDAT_BUILD_GUI" OFF)
-
diff --git a/CMake/cdat_modules/spyder_deps.cmake b/CMake/cdat_modules/spyder_deps.cmake
deleted file mode 100644
index b543e68ade42e6c5230d39398adc7a2403649b49..0000000000000000000000000000000000000000
--- a/CMake/cdat_modules/spyder_deps.cmake
+++ /dev/null
@@ -1 +0,0 @@
-set(spyder_deps ${pyzmq_pkg} ${pyqt_pkg} ${python_pkg} ${pip_pkg} ${numpy_pkg} ${scipy_pkg} ${sphinx_pkg} ${matplotlib_pkg})
diff --git a/CMake/cdat_modules/spyder_external.cmake b/CMake/cdat_modules/spyder_external.cmake
deleted file mode 100644
index dede73c00202304b58e291a38cff9589e2c8eb51..0000000000000000000000000000000000000000
--- a/CMake/cdat_modules/spyder_external.cmake
+++ /dev/null
@@ -1,7 +0,0 @@
-# create an external project to install spyder,
-# and configure and build it
-
-set (nm spyder)
-set(OLD "OFF")
-include(pipinstaller)
-
diff --git a/CMake/cdat_modules/spyder_pkg.cmake b/CMake/cdat_modules/spyder_pkg.cmake
deleted file mode 100644
index 664f2c3198708daaf71936865a8ac56cb49ae88f..0000000000000000000000000000000000000000
--- a/CMake/cdat_modules/spyder_pkg.cmake
+++ /dev/null
@@ -1,9 +0,0 @@
-set(SPYDER_MAJOR_SRC 2)
-set(SPYDER_MINOR_SRC 3)
-set(SPYDER_PATCH_SRC 8)
-set(SPYDER_URL ${LLNL_URL})
-set(SPYDER_ZIP spyder-${SPYDER_MAJOR_SRC}.${SPYDER_MINOR_SRC}.${SPYDER_PATCH_SRC}.zip)
-set(SPYDER_SOURCE ${SPYDER_URL}/${SPYDER_ZIP})
-set(SPYDER_MD5 fb890dc956f606c43d560558159f3491)
-
-add_cdat_package_dependent(spyder "" "" OFF "CDAT_BUILD_GUI" OFF)
diff --git a/CMake/cdat_modules/tcltk_deps.cmake b/CMake/cdat_modules/tcltk_deps.cmake
deleted file mode 100644
index 4f4bf38e9a0deeba92af0ca254d61b781692f1f2..0000000000000000000000000000000000000000
--- a/CMake/cdat_modules/tcltk_deps.cmake
+++ /dev/null
@@ -1 +0,0 @@
-set(TclTk_deps ${pkgconfig_pkg} ${libxml2_pkg} ${libxslt_pkg} ${jpeg_pkg} ${png_pkg} ${tiff_pkg})
diff --git a/CMake/cdat_modules/tcltk_external.cmake b/CMake/cdat_modules/tcltk_external.cmake
deleted file mode 100644
index 9c8baa5f6e14ac3fbca951fccc373ae290576c49..0000000000000000000000000000000000000000
--- a/CMake/cdat_modules/tcltk_external.cmake
+++ /dev/null
@@ -1,62 +0,0 @@
-
-set(tcl_source "${CMAKE_CURRENT_BINARY_DIR}/build/tcl")
-set(tk_source "${CMAKE_CURRENT_BINARY_DIR}/build/tk")
-set(tcltk_install "${cdat_EXTERNALS}")
-
-set(tcltk_configure_args --enable-shared)
-
-# tcl
-#
-set(proj tcl-${TCLTK_MAJOR}.${TCLTK_MINOR})
-
-ExternalProject_Add(${proj}
-  DOWNLOAD_DIR ${CDAT_PACKAGE_CACHE_DIR}
-  SOURCE_DIR ${tcl_source}
-  INSTALL_DIR ${tcltk_install}
-  URL ${TCLTK_URL}/${TCL_GZ}
-  URL_MD5 ${TCL_MD5}
-  BUILD_IN_SOURCE 1
-  CONFIGURE_COMMAND ${CMAKE_COMMAND} -DINSTALL_DIR=<INSTALL_DIR> -DWORKING_DIR=<SOURCE_DIR>/unix -DCONFIGURE_ARGS=${tcltk_configure_args} -P ${cdat_CMAKE_BINARY_DIR}/cdat_configure_step.cmake
-  BUILD_COMMAND ${CMAKE_COMMAND} -Dmake=$(MAKE) -DWORKING_DIR=<SOURCE_DIR>/unix -P ${cdat_CMAKE_BINARY_DIR}/cdat_make_step.cmake
-  INSTALL_COMMAND ${CMAKE_COMMAND} -DWORKING_DIR=<SOURCE_DIR>/unix -P ${cdat_CMAKE_BINARY_DIR}/cdat_install_step.cmake
-  DEPENDS ${TclTk_deps}
-  ${ep_log_options}
-)
-
-# tk
-#
-set(proj tk-${TCLTK_MAJOR}.${TCLTK_MINOR})
-
-ExternalProject_Add(${proj}
-  DOWNLOAD_DIR ${CDAT_PACKAGE_CACHE_DIR}
-  SOURCE_DIR ${tk_source}
-  INSTALL_DIR ${tcltk_install}
-  URL ${TCLTK_URL}/${TK_GZ}
-  URL_MD5 ${TK_MD5}
-  BUILD_IN_SOURCE 1
-  CONFIGURE_COMMAND ${CMAKE_COMMAND} -DINSTALL_DIR=<INSTALL_DIR> -DWORKING_DIR=<SOURCE_DIR>/unix -P ${cdat_CMAKE_BINARY_DIR}/cdat_configure_step.cmake
-  BUILD_COMMAND ${CMAKE_COMMAND} -Dmake=$(MAKE) -DWORKING_DIR=<SOURCE_DIR>/unix -P ${cdat_CMAKE_BINARY_DIR}/cdat_make_step.cmake
-  INSTALL_COMMAND ${CMAKE_COMMAND} -DWORKING_DIR=<SOURCE_DIR>/unix -P ${cdat_CMAKE_BINARY_DIR}/cdat_install_step.cmake
-  DEPENDS tcl-${TCLTK_MAJOR}.${TCLTK_MINOR}
-  ${ep_log_options}
-)
-
-ExternalProject_Add_Step(${proj} symlink
-  COMMAND ${CMAKE_COMMAND} -E create_symlink "wish${TCLTK_MAJOR}.${TCLTK_MINOR}" wish
-  WORKING_DIRECTORY ${tcltk_install}/bin
-  COMMENT "Linking wish${TCLTK_MAJOR}.${TCLTK_MINOR} to wish"
-  DEPENDEES install
-)
-
-# tcltk
-#
-
-ExternalProject_Add(TclTk
-  DOWNLOAD_COMMAND ""
-  CONFIGURE_COMMAND ""
-  BUILD_COMMAND ""
-  INSTALL_COMMAND ""
-  DEPENDS tk-${TCLTK_MAJOR}.${TCLTK_MINOR}
-  ${ep_log_options}
-)
-
diff --git a/CMake/cdat_modules/tcltk_pkg.cmake b/CMake/cdat_modules/tcltk_pkg.cmake
deleted file mode 100644
index 1296043e2d67fd774fff88f32effcf49d2b19809..0000000000000000000000000000000000000000
--- a/CMake/cdat_modules/tcltk_pkg.cmake
+++ /dev/null
@@ -1,18 +0,0 @@
-set(TCLTK_MAJOR 8)
-set(TCLTK_MINOR 5)
-set(TCLTK_PATCH 9)
-set(TCLTK_VERSION ${TCLTK_MAJOR}.${TCLTK_MINOR}.${TCLTK_PATCH})
-set(TCLTK_URL ${LLNL_URL})
-set(TCL_GZ tcl${TCLTK_MAJOR}.${TCLTK_MINOR}.${TCLTK_PATCH}-src.tar.gz)
-set(TK_GZ tk${TCLTK_MAJOR}.${TCLTK_MINOR}.${TCLTK_PATCH}-src.tar.gz)
-set(TCL_MD5 8512d8db3233041dd68a81476906012a)
-set(TK_MD5 7cdeb9feb61593f58a0ae61f2497580e)
-# Two sources here, need to fake it
-set(TCLTK_SOURCE "${TCLTK_URL}/${TCL_GZ} ${TCL_MD5}")
-set(TCLTK_MD5 "${TCLTK_URL}/${TK_GZ} ${TK_MD5}")
-
-if (CDAT_BUILD_ESGF)
-    add_cdat_package(TclTk "" "" OFF)
-else()
-    add_cdat_package_dependent(TclTk "" "" OFF "CDAT_BUILD_GUI" OFF)
-endif()
diff --git a/CMake/cdat_modules/termcap_deps.cmake b/CMake/cdat_modules/termcap_deps.cmake
deleted file mode 100644
index 3c9a6f3aff57d6306e48d61b83e1042987de3ddd..0000000000000000000000000000000000000000
--- a/CMake/cdat_modules/termcap_deps.cmake
+++ /dev/null
@@ -1 +0,0 @@
-set(termcap_deps ${pkgconfig_pkg})
diff --git a/CMake/cdat_modules/termcap_external.cmake b/CMake/cdat_modules/termcap_external.cmake
deleted file mode 100644
index cf57c940a4078e857f036218fc984cbd76df02cb..0000000000000000000000000000000000000000
--- a/CMake/cdat_modules/termcap_external.cmake
+++ /dev/null
@@ -1,16 +0,0 @@
-set(termcap_source "${CMAKE_CURRENT_BINARY_DIR}/build/termcap")
-set(termcap_install "${cdat_EXTERNALS}")
-set(termcap_conf_args)
-
-ExternalProject_Add(termcap
-  DOWNLOAD_DIR ${CDAT_PACKAGE_CACHE_DIR}
-  SOURCE_DIR ${termcap_source}
-  INSTALL_DIR ${termcap_install}
-  URL ${TCAP_URL}/${TCAP_GZ}
-  URL_MD5 ${TCAP_MD5}
-  BUILD_IN_SOURCE 1
-  PATCH_COMMAND ""
-  CONFIGURE_COMMAND ${CMAKE_COMMAND} -DCONFIGURE_ARGS=${termcap_conf_args} -DINSTALL_DIR=<INSTALL_DIR> -DWORKING_DIR=<SOURCE_DIR> -P ${cdat_CMAKE_BINARY_DIR}/cdat_configure_step.cmake
-  DEPENDS ${termcap_deps}
-  ${ep_log_options}
-)
diff --git a/CMake/cdat_modules/termcap_pkg.cmake b/CMake/cdat_modules/termcap_pkg.cmake
deleted file mode 100644
index 11e6a0e9286a370bc841a7a0c80dcee2e6ec3d54..0000000000000000000000000000000000000000
--- a/CMake/cdat_modules/termcap_pkg.cmake
+++ /dev/null
@@ -1,16 +0,0 @@
-set(TCAP_MAJOR_SRC 1)
-set(TCAP_MINOR_SRC 3)
-set(TCAP_PATCH_SRC 1)
-set(TCAP_URL ${LLNL_URL})
-set(TCAP_GZ termcap-${TCAP_MAJOR_SRC}.${TCAP_MINOR_SRC}.${TCAP_PATCH_SRC}.tar.gz)
-set(TCAP_MD5 ffe6f86e63a3a29fa53ac645faaabdfa)
-set(TERMCAP_SOURCE ${TCAP_URL}/${TCAP_GZ})
-set(TERMCAP_MD5 ${TCAP_MD5})
-
-set (nm TCAP)
-string(TOUPPER ${nm} uc_nm)
-set(${uc_nm}_VERSION ${${nm}_MAJOR_SRC}.${${nm}_MINOR_SRC}.${${nm}_PATCH_SRC})
-set(TERMCAP_VERSION ${TCAP_VERSION})
-
-add_cdat_package(termcap "" "" OFF)
-
diff --git a/CMake/cdat_modules/tiff_deps.cmake b/CMake/cdat_modules/tiff_deps.cmake
deleted file mode 100644
index 3a05e71e9619c24b7eb5e227c1413a5ba4c89633..0000000000000000000000000000000000000000
--- a/CMake/cdat_modules/tiff_deps.cmake
+++ /dev/null
@@ -1 +0,0 @@
-set(tiff_deps ${pkgconfig_pkg} ${jpeg_pkg} ${zlib_pkg})
diff --git a/CMake/cdat_modules/tiff_external.cmake b/CMake/cdat_modules/tiff_external.cmake
deleted file mode 100644
index 248a9929d3831b90e47e1779022a9362adff1f4d..0000000000000000000000000000000000000000
--- a/CMake/cdat_modules/tiff_external.cmake
+++ /dev/null
@@ -1,16 +0,0 @@
-
-set(tiff_source "${CMAKE_CURRENT_BINARY_DIR}/build/tiff")
-set(tiff_install "${cdat_EXTERNALS}")
-
-ExternalProject_Add(tiff
-  DOWNLOAD_DIR ${CDAT_PACKAGE_CACHE_DIR}
-  SOURCE_DIR ${tiff_source}
-  INSTALL_DIR ${tiff_install}
-  URL ${TIFF_URL}/${TIFF_GZ}
-  URL_MD5 ${TIFF_MD5}
-  BUILD_IN_SOURCE 1
-  PATCH_COMMAND ""
-  CONFIGURE_COMMAND ${CMAKE_COMMAND} -DINSTALL_DIR=<INSTALL_DIR> -DWORKING_DIR=<SOURCE_DIR> -P ${cdat_CMAKE_BINARY_DIR}/cdat_configure_step.cmake
-  DEPENDS ${tiff_deps}
-  ${ep_log_options}
-)
diff --git a/CMake/cdat_modules/tiff_pkg.cmake b/CMake/cdat_modules/tiff_pkg.cmake
deleted file mode 100644
index 09a6a191a70848c19e44b82d528b5690c27cba8a..0000000000000000000000000000000000000000
--- a/CMake/cdat_modules/tiff_pkg.cmake
+++ /dev/null
@@ -1,12 +0,0 @@
-set(TIFF_MAJOR 3)
-set(TIFF_MINOR 9)
-set(TIFF_PATCH 4)
-set(TIFF_URL ${LLNL_URL})
-set(TIFF_GZ tiff-${TIFF_MAJOR}.${TIFF_MINOR}.${TIFF_PATCH}.tar.gz)
-set(TIFF_MD5 2006c1bdd12644dbf02956955175afd6)
-set(TIFF_SOURCE ${TIFF_URL}/${TIFF_GZ})
-
-set (nm TIFF)
-string(TOUPPER ${nm} uc_nm)
-set(${uc_nm}_VERSION ${${nm}_MAJOR}.${${nm}_MINOR}.${${nm}_PATCH})
-add_cdat_package_dependent(tiff "" "" ON "CDAT_BUILD_GRAPHICS" OFF)
diff --git a/CMake/cdat_modules/tornado_deps.cmake b/CMake/cdat_modules/tornado_deps.cmake
deleted file mode 100644
index 6c8e9f67da6755a8660e5ce45e0b46b968022a3d..0000000000000000000000000000000000000000
--- a/CMake/cdat_modules/tornado_deps.cmake
+++ /dev/null
@@ -1 +0,0 @@
-set(tornado_deps ${spyder_pkg} ${pyzmq_pkg} ${pip_pkg})
diff --git a/CMake/cdat_modules/tornado_external.cmake b/CMake/cdat_modules/tornado_external.cmake
deleted file mode 100644
index 3531582b0c3c6741e00c4b74a16a1f458504d812..0000000000000000000000000000000000000000
--- a/CMake/cdat_modules/tornado_external.cmake
+++ /dev/null
@@ -1,5 +0,0 @@
-# create an external project to install tornado,
-# and configure and build it
-set(nm tornado)
-
-include(pipinstaller)
diff --git a/CMake/cdat_modules/tornado_pkg.cmake b/CMake/cdat_modules/tornado_pkg.cmake
deleted file mode 100644
index a40c77381b6f101b68b96c29ba71ae8e2b138ae3..0000000000000000000000000000000000000000
--- a/CMake/cdat_modules/tornado_pkg.cmake
+++ /dev/null
@@ -1,7 +0,0 @@
-set(TORNADO_VERSION 3.1)
-set(TORNADO_URL ${LLNL_URL})
-set(TORNADO_GZ tornado-${TORNADO_VERSION}.tar.gz)
-set(TORNADO_SOURCE ${TORNADO_URL}/${TORNADO_GZ})
-set(TORNADO_MD5 2348d626095c5675753287e9af0c321f )
-
-add_cdat_package(tornado "" "" OFF)
diff --git a/CMake/cdat_modules/udunits2_deps.cmake b/CMake/cdat_modules/udunits2_deps.cmake
deleted file mode 100644
index b032ce41d58112f82465a9e7019c878a2ca04fe5..0000000000000000000000000000000000000000
--- a/CMake/cdat_modules/udunits2_deps.cmake
+++ /dev/null
@@ -1 +0,0 @@
-set(udunits2_deps ${pkgconfig_pkg})
diff --git a/CMake/cdat_modules/udunits2_external.cmake b/CMake/cdat_modules/udunits2_external.cmake
deleted file mode 100644
index c70b20fd93c5bb7fbbda716231797c3aeaf20511..0000000000000000000000000000000000000000
--- a/CMake/cdat_modules/udunits2_external.cmake
+++ /dev/null
@@ -1,25 +0,0 @@
-set(udunits_source "${CMAKE_CURRENT_BINARY_DIR}/build/udunits2")
-set(udunits_install "${cdat_EXTERNALS}")
-
-set(udunits_patch_command "")
-if(APPLE)
-  # Modified configure file to workaround random flex failures
-  set(udunits_patch_command
-    ${CMAKE_COMMAND} -E copy_if_different
-      "${cdat_CMAKE_SOURCE_DIR}/cdat_modules_extra/udunits2_apple_configure.in"
-      "${udunits_source}/configure")
-endif()
-
-ExternalProject_Add(udunits2
-  DOWNLOAD_DIR ${CDAT_PACKAGE_CACHE_DIR}
-  SOURCE_DIR ${udunits_source}
-  INSTALL_DIR ${udunits_install}
-  URL ${UDUNITS2_URL}/${UDUNITS2_GZ}
-  URL_MD5 ${UDUNITS2_MD5}
-  BUILD_IN_SOURCE 1
-  PATCH_COMMAND ${udunits_patch_command}
-  CONFIGURE_COMMAND ${CMAKE_COMMAND} -DINSTALL_DIR=<INSTALL_DIR> -DWORKING_DIR=<SOURCE_DIR> -P ${cdat_CMAKE_BINARY_DIR}/cdat_configure_step.cmake
-  DEPENDS ${udunits2_deps}
-  ${ep_log_options}
-)
-
diff --git a/CMake/cdat_modules/udunits2_pkg.cmake b/CMake/cdat_modules/udunits2_pkg.cmake
deleted file mode 100644
index b114ac7707cc4959dd3029bfb0ed68388232d91c..0000000000000000000000000000000000000000
--- a/CMake/cdat_modules/udunits2_pkg.cmake
+++ /dev/null
@@ -1,14 +0,0 @@
-set(UDUNITS2_URL ${LLNL_URL})
-set(UDUNITS2_MAJOR_SRC 2)
-set(UDUNITS2_MINOR_SRC 2)
-set(UDUNITS2_PATCH_SRC 17)
-set(UDUNITS2_URL ${LLNL_URL})
-set(UDUNITS2_GZ udunits-${UDUNITS2_MAJOR_SRC}.${UDUNITS2_MINOR_SRC}.${UDUNITS2_PATCH_SRC}.tar.gz)
-set(UDUNITS2_MD5 b81ab8f24125ce18702ab7b3ca4d566f )
-set(UDUNITS2_SOURCE ${UDUNITS2_URL}/${UDUNITS2_GZ})
-
-set (nm UDUNITS2)
-string(TOUPPER ${nm} uc_nm)
-set(${uc_nm}_VERSION ${${nm}_MAJOR_SRC}.${${nm}_MINOR_SRC}.${${nm}_PATCH_SRC})
-
-add_cdat_package_dependent(udunits2 "" "" OFF "CDAT_BUILD_LEAN" OFF)
diff --git a/CMake/cdat_modules/uuid_deps.cmake b/CMake/cdat_modules/uuid_deps.cmake
deleted file mode 100644
index 2f2b9e4ba1936b3a6723d83b91b60a4129de13ba..0000000000000000000000000000000000000000
--- a/CMake/cdat_modules/uuid_deps.cmake
+++ /dev/null
@@ -1 +0,0 @@
-set(uuid_deps ${pkgconfig_pkg} )
diff --git a/CMake/cdat_modules/uuid_external.cmake b/CMake/cdat_modules/uuid_external.cmake
deleted file mode 100644
index a53deeb799dad1bbd4ce5933cea6e9b7dc469a9c..0000000000000000000000000000000000000000
--- a/CMake/cdat_modules/uuid_external.cmake
+++ /dev/null
@@ -1,19 +0,0 @@
-
-set(uuid_source "${CMAKE_CURRENT_BINARY_DIR}/build/uuid")
-set(uuid_install "${cdat_EXTERNALS}")
-
-ExternalProject_Add(uuid
-  DOWNLOAD_DIR ${CDAT_PACKAGE_CACHE_DIR}
-  SOURCE_DIR ${uuid_source}
-  INSTALL_DIR ${uuid_install}
-  URL ${UUID_URL}/${UUID_GZ}
-  URL_MD5 ${UUID_MD5}
-  BUILD_IN_SOURCE 1
-  PATCH_COMMAND ""
-  CONFIGURE_COMMAND ${CMAKE_COMMAND} -DINSTALL_DIR=<INSTALL_DIR> -DWORKING_DIR=<SOURCE_DIR> -P ${cdat_CMAKE_BINARY_DIR}/cdat_configure_step.cmake
-  DEPENDS ${uuid_deps}
-  ${ep_log_options}
-)
-
-set(uuid_DIR "${uuid_binary}" CACHE PATH "uuid binary directory" FORCE)
-mark_as_advanced(uuid_DIR)
diff --git a/CMake/cdat_modules/uuid_pkg.cmake b/CMake/cdat_modules/uuid_pkg.cmake
deleted file mode 100644
index d05bfb620aa7d2963e4ec5c94aeffd7ff356dce7..0000000000000000000000000000000000000000
--- a/CMake/cdat_modules/uuid_pkg.cmake
+++ /dev/null
@@ -1,15 +0,0 @@
-set(UUID_MAJOR_SRC 1)
-set(UUID_MINOR_SRC 6)
-set(UUID_PATCH_SRC 2)
-set(UUID_URL ${LLNL_URL})
-set(UUID_GZ uuid-${UUID_MAJOR_SRC}.${UUID_MINOR_SRC}.${UUID_PATCH_SRC}.tar.gz)
-set(UUID_MD5 5db0d43a9022a6ebbbc25337ae28942f)
-set(UUID_SOURCE ${UUID_URL}/${UUID_GZ})
-
-set (nm UUID)
-string(TOUPPER ${nm} uc_nm)
-set(${uc_nm}_VERSION ${${nm}_MAJOR_SRC}.${${nm}_MINOR_SRC}.${${nm}_PATCH_SRC})
-#apparently libcf needs it
-add_cdat_package_dependent(uuid "" "" OFF "CDAT_BUILD_LEAN" ON)
-#add_cdat_package(uuid "" "" OFF)
-
diff --git a/CMake/cdat_modules/uvcmetrics_deps.cmake b/CMake/cdat_modules/uvcmetrics_deps.cmake
deleted file mode 100644
index a01e906ae6d53b1e9245357f5210342844c13e2f..0000000000000000000000000000000000000000
--- a/CMake/cdat_modules/uvcmetrics_deps.cmake
+++ /dev/null
@@ -1 +0,0 @@
-set(UVCMETRICS_deps ${cdat_pkg} )
diff --git a/CMake/cdat_modules/uvcmetrics_external.cmake b/CMake/cdat_modules/uvcmetrics_external.cmake
deleted file mode 100644
index 4a9ad2d1e9652d1b6b976b9206a6221c90166b49..0000000000000000000000000000000000000000
--- a/CMake/cdat_modules/uvcmetrics_external.cmake
+++ /dev/null
@@ -1,42 +0,0 @@
-
-if (CDAT_DOWNLOAD_UVCMETRICS_TESTDATA)
-  set(UVCMETRICS_DOWNLOAD_FILES "")
-
-  file(READ "${cdat_CMAKE_SOURCE_DIR}/cdat_modules_extra/uvcmetrics_test_data_md5s.txt" UVCMETRICS_FILES)
-  string(REGEX REPLACE ";" "\\\\;" UVCMETRICS_FILES "${UVCMETRICS_FILES}")
-  string(REGEX REPLACE "\n" ";" UVCMETRICS_FILES "${UVCMETRICS_FILES}")
-
-  foreach(line ${UVCMETRICS_FILES})
-    string(REGEX REPLACE " +" ";" line "${line}")
-    list(GET line 1 base_file_path)
-    list(GET line 0 FILE_MD5)
-
-    string(STRIP "${base_file_path}" base_file_path)
-    string(STRIP "${FILE_MD5}" FILE_MD5)
-
-    set(FILE_PATH "${UVCMETRICS_TEST_DATA_DIRECTORY}/${base_file_path}")
-    list(APPEND UVCMETRICS_DOWNLOAD_FILES "${FILE_PATH}")
-
-    set(FILE_URL "${LLNL_URL}/../sample_data/uvcmetrics_2.4.1/${base_file_path}")
-
-    add_custom_command(
-      OUTPUT "${FILE_PATH}"
-      COMMAND "${CMAKE_COMMAND}"
-        -D FILE_URL="${FILE_URL}"
-        -D FILE_MD5="${FILE_MD5}"
-        -D FILE_PATH="${FILE_PATH}"
-        -P "${cdat_CMAKE_SOURCE_DIR}/cdat_modules_extra/fetch_uvcmetrics_testdata.cmake"
-      DEPENDS "${uvcmetrics_data_keyfile}"
-      COMMENT "Downloading ${base_file_path}"
-    )
-  endforeach()
-
-  add_custom_target(uvcmetrics_test_data ALL DEPENDS ${UVCMETRICS_DOWNLOAD_FILES})
-endif()
-
-set(GIT_CMD_STR GIT_REPOSITORY "${UVCMETRICS_SOURCE}")
-set(GIT_TAG GIT_TAG "${UVCMETRICS_BRANCH}")
-set(nm UVCMETRICS)
-set(OLD OFF)
-include(pipinstaller)
-unset(OLD)
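The file(READ) followed by the two string(REGEX REPLACE) calls above is the stock CMake idiom for iterating over the lines of a text file: first escape literal semicolons, then turn newlines into list separators. A standalone illustration against a hypothetical two-column "md5 path" manifest (md5s.txt is a made-up name):

    file(READ "${CMAKE_CURRENT_LIST_DIR}/md5s.txt" _manifest)
    string(REGEX REPLACE ";" "\\\\;" _manifest "${_manifest}")  # protect embedded ';'
    string(REGEX REPLACE "\n" ";" _manifest "${_manifest}")     # one list item per line
    foreach(_line ${_manifest})
      string(REGEX REPLACE " +" ";" _line "${_line}")  # split columns on runs of spaces
      list(GET _line 0 _md5)
      list(GET _line 1 _path)
      message(STATUS "would fetch ${_path} (md5 ${_md5})")
    endforeach()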
diff --git a/CMake/cdat_modules/uvcmetrics_pkg.cmake b/CMake/cdat_modules/uvcmetrics_pkg.cmake
deleted file mode 100644
index 7024a9580889a9e60382d7dc290b1ac90bc0c419..0000000000000000000000000000000000000000
--- a/CMake/cdat_modules/uvcmetrics_pkg.cmake
+++ /dev/null
@@ -1,14 +0,0 @@
-set (nm UVCMETRICS)
-string(TOUPPER ${nm} uc_nm)
-set(${uc_nm}_VERSION ${${nm}_TAG})
-set(UVCMETRICS_URL ${LLNL_URL})
-set(UVCMETRICS_ZIP uvcmetrics-${UVCMETRICS_VERSION}.zip)
-#set(UVCMETRICS_SOURCE ${UVCMETRICS_URL}/${UVCMETRICS_ZIP})
-set(UVCMETRICS_SOURCE ${GIT_PROTOCOL}github.com/UV-CDAT/uvcmetrics.git )
-set(UVCMETRICS_MD5)
-set(UVCMETRICS_BRANCH uvcdat-2.4.1)
-
-if (NOT CDAT_BUILD_LEAN)
-  add_cdat_package(UVCMETRICS "" "" ON)
-endif()
-
diff --git a/CMake/cdat_modules/vacumm_deps.cmake b/CMake/cdat_modules/vacumm_deps.cmake
deleted file mode 100644
index 9472871dd18d68537b135a7d297938cdb32e4508..0000000000000000000000000000000000000000
--- a/CMake/cdat_modules/vacumm_deps.cmake
+++ /dev/null
@@ -1 +0,0 @@
-set(vacumm_deps ${python_pkg} ${numpy_pkg} ${scipy_pkg} ${matplotlib_pkg} ${basemap_pkg} ${configobj_pkg} ${setuptools_pkg})
diff --git a/CMake/cdat_modules/vacumm_external.cmake b/CMake/cdat_modules/vacumm_external.cmake
deleted file mode 100644
index 0cf4556ff1f28fd5a77c4a3755b1812752c5e476..0000000000000000000000000000000000000000
--- a/CMake/cdat_modules/vacumm_external.cmake
+++ /dev/null
@@ -1,24 +0,0 @@
-# vacumm
-#
-set(vacumm_source_dir "${CMAKE_CURRENT_BINARY_DIR}/build/vacumm")
-
-configure_file(
-  "${cdat_CMAKE_SOURCE_DIR}/cdat_modules_extra/vacumm_build_step.cmake.in"
-  "${cdat_CMAKE_BINARY_DIR}/vacumm_build_step.cmake"
-  @ONLY
-  )
-
-set(vacumm_build_command ${CMAKE_COMMAND} -P ${cdat_CMAKE_BINARY_DIR}/vacumm_build_step.cmake)
-
-ExternalProject_Add(vacumm
-  DOWNLOAD_DIR ${CDAT_PACKAGE_CACHE_DIR}
-  SOURCE_DIR ${vacumm_source_dir}
-  URL ${VACUMM_URL}/${VACUMM_GZ}
-  URL_MD5 ${VACUMM_MD5}
-  BUILD_IN_SOURCE 1
-  CONFIGURE_COMMAND ""
-  BUILD_COMMAND ${vacumm_build_command}
-  INSTALL_COMMAND ""
-  DEPENDS ${vacumm_deps}
-  ${ep_log_options}
-  )
diff --git a/CMake/cdat_modules/vacumm_pkg.cmake b/CMake/cdat_modules/vacumm_pkg.cmake
deleted file mode 100644
index 7dea0632e87bfc3cf6599e22fba5be82ab7a4119..0000000000000000000000000000000000000000
--- a/CMake/cdat_modules/vacumm_pkg.cmake
+++ /dev/null
@@ -1,18 +0,0 @@
-set(VACUMM_MAJOR 3)
-set(VACUMM_MINOR 0)
-set(VACUMM_PATCH 0)
-set(VACUMM_VERSION ${VACUMM_MAJOR}.${VACUMM_MINOR}.${VACUMM_PATCH})
-set(VACUMM_URL ${LLNL_URL} )
-set(VACUMM_GZ vacumm-${VACUMM_VERSION}.tar.gz)
-set(VACUMM_MD5 b468fa72ddba9d0cd39d51164bef1dd4)
-
-set (nm VACUMM)
-string(TOUPPER ${nm} uc_nm)
-set(${uc_nm}_VERSION ${${nm}_MAJOR}.${${nm}_MINOR}.${${nm}_PATCH})
-set(VACUMM_SOURCE ${VACUMM_URL}/${VACUMM_GZ})
-
-if (CDAT_BUILD_ALL)
-  add_cdat_package(vacumm "" "" ON)
-else()
-  add_cdat_package(vacumm "" "" OFF)
-endif()
diff --git a/CMake/cdat_modules/visit_deps.cmake b/CMake/cdat_modules/visit_deps.cmake
deleted file mode 100644
index 023429df27eb92786a11cc36a1dbdd2048eb7ab0..0000000000000000000000000000000000000000
--- a/CMake/cdat_modules/visit_deps.cmake
+++ /dev/null
@@ -1 +0,0 @@
-set(VisIt_deps ${pkgconfig_pkg} ${sip_pkg} ${pyqt_pkg} ${paraview_pkg} ${r_pkg})
diff --git a/CMake/cdat_modules/visit_external.cmake b/CMake/cdat_modules/visit_external.cmake
deleted file mode 100644
index 7fbdb404cc169a8862f3017c892e47427b450b12..0000000000000000000000000000000000000000
--- a/CMake/cdat_modules/visit_external.cmake
+++ /dev/null
@@ -1,173 +0,0 @@
-set(VisIt_source "${CMAKE_CURRENT_BINARY_DIR}/build/VisIt")
-set(VisIt_binary "${CMAKE_CURRENT_BINARY_DIR}/build/VisIt")
-set(VisIt_install "${CMAKE_INSTALL_PREFIX}")
-
-if(QT_QMAKE_EXECUTABLE)
-  get_filename_component(QT_BINARY_DIR ${QT_QMAKE_EXECUTABLE} PATH)
-  get_filename_component(QT_ROOT ${QT_BINARY_DIR} PATH)
-endif()
-
-GET_FILENAME_COMPONENT(CMAKE_PATH_VAR ${CMAKE_COMMAND} PATH)
-SET(VISIT_C_FLAGS "${CMAKE_C_FLAGS} -I${cdat_EXTERNALS}/include")
-GET_FILENAME_COMPONENT(VISIT_C_COMPILER ${CMAKE_C_COMPILER} NAME)
-SET(VISIT_CXX_FLAGS "${CMAKE_CXX_FLAGS} -I${cdat_EXTERNALS}/include")
-GET_FILENAME_COMPONENT(VISIT_CXX_COMPILER ${CMAKE_CXX_COMPILER} NAME)
-SET(VISIT_LINKER_FLAGS "${CMAKE_EXE_LINKER_FLAGS} -L${cdat_EXTERNALS}/lib")
-
-MACRO(DETERMINE_VISIT_ARCHITECTURE ARCH)
-    IF(${CMAKE_SYSTEM_NAME} STREQUAL "Linux")
-        IF(${CMAKE_SYSTEM_PROCESSOR} STREQUAL "ppc")
-            SET(${ARCH} linux-ppc)
-        ELSEIF(${CMAKE_SYSTEM_PROCESSOR} STREQUAL "ppc64")
-            SET(${ARCH} linux-ppc64)
-        ELSEIF(${CMAKE_SYSTEM_PROCESSOR} STREQUAL "x86_64")
-            SET(${ARCH} linux-x86_64)
-        ELSEIF(${CMAKE_SYSTEM_PROCESSOR} STREQUAL "ia64")
-            SET(${ARCH} linux-ia64)
-        ELSE(${CMAKE_SYSTEM_PROCESSOR} STREQUAL "ppc")
-            SET(${ARCH} linux-intel)
-        ENDIF(${CMAKE_SYSTEM_PROCESSOR} STREQUAL "ppc")
-    ELSEIF(${CMAKE_SYSTEM_NAME} STREQUAL "AIX")
-        IF($ENV{OBJECT_MODE} STREQUAL "32")
-            SET(${ARCH} "ibm-aix-pwr")
-        ELSE($ENV{OBJECT_MODE} STREQUAL "32")
-            SET(${ARCH} "ibm-aix-pwr64")
-        ENDIF($ENV{OBJECT_MODE} STREQUAL "32")
-    ELSEIF(${CMAKE_SYSTEM_NAME} STREQUAL "Darwin")
-        IF(${CMAKE_SYSTEM_PROCESSOR} STREQUAL "i386")
-            EXECUTE_PROCESS(COMMAND uname -r
-               OUTPUT_STRIP_TRAILING_WHITESPACE
-               OUTPUT_VARIABLE _OSX_VERSION)
-            STRING(SUBSTRING ${_OSX_VERSION} 0 1 _OSX_MAJOR_VERSION)
-            IF(${_OSX_MAJOR_VERSION} STREQUAL "1")
-                # This will match 10, 11, 12, ...
-                SET(${ARCH} darwin-x86_64)
-            ELSE(${_OSX_MAJOR_VERSION} STREQUAL "1")
-                SET(${ARCH} darwin-i386)
-            ENDIF(${_OSX_MAJOR_VERSION} STREQUAL "1")
-        ELSE(${CMAKE_SYSTEM_PROCESSOR} STREQUAL "i386")
-            SET(${ARCH} darwin-x86_64)
-        ENDIF(${CMAKE_SYSTEM_PROCESSOR} STREQUAL "i386")
-    ELSEIF(${CMAKE_SYSTEM_NAME} STREQUAL "FreeBSD")
-        SET(${ARCH} "freebsd-${CMAKE_SYSTEM_VERSION}")
-    ELSEIF(${CMAKE_SYSTEM_NAME} STREQUAL "IRIX")
-        SET(${ARCH} sgi-irix6-mips2)
-    ELSEIF(${CMAKE_SYSTEM_NAME} STREQUAL "SunOS")
-        SET(${ARCH} "sun4-${CMAKE_SYSTEM_VERSION}-sparc")
-    ELSEIF(${CMAKE_SYSTEM_NAME} STREQUAL "Tru64")
-        SET(${ARCH} dec-osf1-alpha)
-    ELSE(${CMAKE_SYSTEM_NAME} STREQUAL "Linux")
-        # Unhandled case. Make up a string.
-        SET(VISITARCHTMP "${CMAKE_SYSTEM_NAME}-${CMAKE_SYSTEM_PROCESSOR}")
-        STRING(TOLOWER ${VISITARCHTMP} ${ARCH})
-    ENDIF(${CMAKE_SYSTEM_NAME} STREQUAL "Linux")
-ENDMACRO(DETERMINE_VISIT_ARCHITECTURE ARCH)
-
-# Note: this is a workaround to handle the build on APPLE
-IF(APPLE)
-  SET(VISIT_INSTALL_PLATFORM "darwin-x86_64")
-ELSE(APPLE)
-  DETERMINE_VISIT_ARCHITECTURE(VISIT_INSTALL_PLATFORM)
-ENDIF(APPLE)
-
-SET(VISIT_HOSTNAME "visit-uvcdat-build")
-
-
-#Add VisIt to ExternalProject
-ExternalProject_Add(VisIt
-  #DOWNLOAD_DIR ${VisIt_source} #${CDAT_PACKAGE_CACHE_DIR}
-  SOURCE_DIR ${VisIt_source}
-  BINARY_DIR ${VisIt_binary}
-  INSTALL_DIR ${VisIt_install}
-  #SVN_REPOSITORY ${VISIT_SVN}
-  URL ${VISIT_URL}/${VISIT_GZ}
-  #URL_MD5 ${VISIT_MD5}
-  PATCH_COMMAND ""
-  #CONFIGURE_COMMAND ""
-  BUILD_COMMAND ""
-  CMAKE_ARGS -DCMAKE_INSTALL_PREFIX=${VisIt_install} -DCMAKE_INSTALL_NAME_DIR=${VisIt_install}/${VISIT_VERSION}/${VISIT_INSTALL_PLATFORM}/lib -DVISIT_CONFIG_SITE:FILEPATH=${VisIt_source}/${VISIT_HOSTNAME}.cmake
-  DEPENDS ${VisIt_deps}
-  ${ep_log_options}
-)
-
-if(NOT EXISTS ${CMAKE_INSTALL_PREFIX}/lib)
-  file(MAKE_DIRECTORY ${CMAKE_INSTALL_PREFIX}/lib)
-endif()
-
-#add references to VisIt's cmake
-SET(TMP_STR1 "FILE(APPEND ${VisIt_source}/${VISIT_HOSTNAME}.cmake \"VISIT_OPTION_DEFAULT(VISIT_OSX_USE_RPATH TYPE BOOL ON)\\n\")\n")
-SET(TMP_STR2 "FILE(APPEND ${VisIt_source}/${VISIT_HOSTNAME}.cmake \"VISIT_OPTION_DEFAULT(VISIT_QT_SKIP_INSTALL TYPE BOOL ON)\\n\")\n")
-SET(TMP_STR3 "FILE(APPEND ${VisIt_source}/${VISIT_HOSTNAME}.cmake \"VISIT_OPTION_DEFAULT(VISIT_PYTHON_SKIP_INSTALL TYPE BOOL ON)\\n\")\n")
-SET(TMP_STR4 "FILE(APPEND ${VisIt_source}/${VISIT_HOSTNAME}.cmake \"VISIT_OPTION_DEFAULT(VISIT_HEADERS_SKIP_INSTALL TYPE BOOL ON)\\n\")\n")
-SET(TMP_STR5 "FILE(APPEND ${VisIt_source}/${VISIT_HOSTNAME}.cmake \"VISIT_OPTION_DEFAULT(VISIT_VTK_SKIP_INSTALL TYPE BOOL ON)\\n\")\n")
-SET(TMP_STR6 "FILE(APPEND ${VisIt_source}/${VISIT_HOSTNAME}.cmake \"VISIT_OPTION_DEFAULT(VISIT_R_SKIP_INSTALL TYPE BOOL ON)\\n\")\n")
-SET(TMP_STR7 "FILE(APPEND ${VisIt_source}/${VISIT_HOSTNAME}.cmake \"add_definitions(-DEXTERNAL_VTK_BUILD)\\n\")\n")
-SET(TMP_STR8 "FILE(APPEND ${VisIt_source}/${VISIT_HOSTNAME}.cmake \"VISIT_OPTION_DEFAULT(CMAKE_EXE_LINKER_FLAGS \\\"\\\${CMAKE_EXE_LINKER_FLAGS} ${VISIT_LINKER_FLAGS}\\\")\\n\")\n")
-SET(TMP_STR9 "FILE(APPEND ${VisIt_source}/${VISIT_HOSTNAME}.cmake \"VISIT_OPTION_DEFAULT(VISIT_C_FLAGS \\\"\\\${VISIT_C_FLAGS} ${VISIT_C_FLAGS}\\\")\\n\")\n")
-SET(TMP_STR10 "FILE(APPEND ${VisIt_source}/${VISIT_HOSTNAME}.cmake \"VISIT_OPTION_DEFAULT(VISIT_CXX_FLAGS \\\"\\\${VISIT_CXX_FLAGS} ${VISIT_CXX_FLAGS}\\\")\\n\")\n")
-
-FILE(WRITE   ${CMAKE_BINARY_DIR}/visit.cmake ${TMP_STR1})
-FILE(APPEND  ${CMAKE_BINARY_DIR}/visit.cmake ${TMP_STR2})
-FILE(APPEND  ${CMAKE_BINARY_DIR}/visit.cmake ${TMP_STR3})
-FILE(APPEND  ${CMAKE_BINARY_DIR}/visit.cmake ${TMP_STR4})
-FILE(APPEND  ${CMAKE_BINARY_DIR}/visit.cmake ${TMP_STR5})
-FILE(APPEND  ${CMAKE_BINARY_DIR}/visit.cmake ${TMP_STR6})
-FILE(APPEND  ${CMAKE_BINARY_DIR}/visit.cmake ${TMP_STR7})
-FILE(APPEND  ${CMAKE_BINARY_DIR}/visit.cmake ${TMP_STR8})
-FILE(APPEND  ${CMAKE_BINARY_DIR}/visit.cmake ${TMP_STR9})
-FILE(APPEND  ${CMAKE_BINARY_DIR}/visit.cmake ${TMP_STR10})
-
-# Before the install step:
-# patch the sources and bootstrap VisIt's third-party build environment
-ExternalProject_Add_Step(VisIt BuildVisItPatch_Step1
- COMMAND sed -e s/<object.h>/"object.h"/g ${VisIt_source}/databases/DDCMD/avtDDCMDFileFormat.C > ${VisIt_source}/databases/DDCMD/avtDDCMDFileFormat.C_tmp
- COMMAND mv ${VisIt_source}/databases/DDCMD/avtDDCMDFileFormat.C_tmp ${VisIt_source}/databases/DDCMD/avtDDCMDFileFormat.C
-  COMMAND echo yes | svn_bin/build_visit --gpl --console --cc ${VISIT_C_COMPILER} --cxx ${VISIT_CXX_COMPILER} --alt-vtk-dir ${ParaView_binary}/VTK --alt-pyqt-dir ${CMAKE_INSTALL_PREFIX} --alt-R-dir ${cdat_EXTERNALS} --alt-netcdf-dir ${cdat_EXTERNALS} --alt-hdf5-dir ${cdat_EXTERNALS} --thirdparty-path ${CMAKE_CURRENT_BINARY_DIR}/visit-thirdparty --cmake-bin-dir ${CMAKE_PATH_VAR} --alt-python-dir ${CMAKE_INSTALL_PREFIX} --alt-qt-dir ${QT_ROOT} --no-visit --makeflags -j${VISIT_PARALLEL_PROCESSORS} --log-file ${CMAKE_BINARY_DIR}/logs/VisIt-build-out.log --no-mesa --visit-build-hostname ${VisIt_source}/${VISIT_HOSTNAME}.cmake
-  COMMAND ${CMAKE_COMMAND} -P ${CMAKE_BINARY_DIR}/visit.cmake 
-  DEPENDEES patch
-  DEPENDERS configure
-  WORKING_DIRECTORY ${VisIt_source})
-
-#After installation
-# Make symlinks of VisIt's lib and plugins directories, and
-# move pyqt_pyqtviewer.so and its plugin into Python's site-packages
-message("COMMAND1: ${CMAKE_COMMAND} -E create_symlink ${VisIt_install}/${VISIT_VERSION}/${VISIT_INSTALL_PLATFORM}/lib ${CMAKE_INSTALL_PREFIX}/lib/VisIt-${VISIT_VERSION}")
-
-message("COMMAND2: ${CMAKE_COMMAND} -E create_symlink ${VisIt_install}/${VISIT_VERSION}/${VISIT_INSTALL_PLATFORM}/plugins ${CMAKE_INSTALL_PREFIX}/lib/VisIt-${VISIT_VERSION}-plugins")
-
-ExternalProject_Add_Step(VisIt InstallVisItLibSymLink
-  COMMAND ${CMAKE_COMMAND} -E create_symlink ${VisIt_install}/${VISIT_VERSION}/${VISIT_INSTALL_PLATFORM}/lib ${CMAKE_INSTALL_PREFIX}/lib/VisIt-${VISIT_VERSION}
-  COMMAND ${CMAKE_COMMAND} -E create_symlink ${VisIt_install}/${VISIT_VERSION}/${VISIT_INSTALL_PLATFORM}/plugins ${CMAKE_INSTALL_PREFIX}/lib/VisIt-${VISIT_VERSION}-plugins
-  DEPENDEES install
-  WORKING_DIRECTORY ${cdat_CMAKE_BINARY_DIR})
-
-FILE(WRITE ${CMAKE_CURRENT_BINARY_DIR}/visit_install_patch "MESSAGE(STATUS \"Executing VisIt post installation steps\")\n")
-FILE(APPEND ${CMAKE_CURRENT_BINARY_DIR}/visit_install_patch "file(GLOB hdf5_files ${HDF5_install}/lib/libhdf5*${_LINK_LIBRARY_SUFFIX}*)\n")
-FILE(APPEND ${CMAKE_CURRENT_BINARY_DIR}/visit_install_patch "file(COPY \${hdf5_files} DESTINATION ${CMAKE_INSTALL_PREFIX}/lib/VisIt-${VISIT_VERSION}/)\n")
-
-FILE(APPEND ${CMAKE_CURRENT_BINARY_DIR}/visit_install_patch "file(GLOB netcdf_files ${netcdf_install}/lib/libnetcdf*${_LINK_LIBRARY_SUFFIX}*)\n")
-FILE(APPEND ${CMAKE_CURRENT_BINARY_DIR}/visit_install_patch "file(COPY \${netcdf_files} DESTINATION ${CMAKE_INSTALL_PREFIX}/lib/VisIt-${VISIT_VERSION}/)\n")
-
-FILE(APPEND ${CMAKE_CURRENT_BINARY_DIR}/visit_install_patch "file(GLOB z_files ${zlib_install}/lib/libz*${_LINK_LIBRARY_SUFFIX}*)\n")
-FILE(APPEND ${CMAKE_CURRENT_BINARY_DIR}/visit_install_patch "file(COPY \${z_files} DESTINATION ${CMAKE_INSTALL_PREFIX}/lib/VisIt-${VISIT_VERSION}/)\n")
-
-FILE(APPEND ${CMAKE_CURRENT_BINARY_DIR}/visit_install_patch "file(GLOB curl_files ${curl_install}/lib/libcurl*${_LINK_LIBRARY_SUFFIX}*)\n")
-FILE(APPEND ${CMAKE_CURRENT_BINARY_DIR}/visit_install_patch "file(COPY \${curl_files} DESTINATION ${CMAKE_INSTALL_PREFIX}/lib/VisIt-${VISIT_VERSION}/)\n")
-
-ExternalProject_Add_Step(VisIt InstallVisItExternalLibraries
-  COMMAND ${CMAKE_COMMAND} -P ${CMAKE_CURRENT_BINARY_DIR}/visit_install_patch
-  DEPENDEES InstallVisItLibSymLink
-  WORKING_DIRECTORY ${cdat_CMAKE_BINARY_DIR}
-  )
-
-# clean up unnecessary database readers
-ExternalProject_Add_Step(VisIt RemoveUnnecessaryDatabaseReaders
-  COMMAND find . ! \( -iname "*netcdf*" -o -iname "*image*" -o -iname "*hdf5*" -o -iname "*pixie*" -o -iname "*vtk*" -o -iname "*mtk*" -o -iname "*xdmf*" \) -type f -delete
-  DEPENDEES install
-  WORKING_DIRECTORY ${VisIt_install}/${VISIT_VERSION}/${VISIT_INSTALL_PLATFORM}/plugins/databases)
-
-FILE(WRITE ${CMAKE_CURRENT_BINARY_DIR}/r_ismev_package "r = getOption('repos'); r['CRAN'] = 'http://cran.us.r-project.org'; options(repos = r); rm(r); install.packages('ismev')")
-
-ExternalProject_Add_Step(VisIt AddRDependencies
-  COMMAND ${cdat_EXTERNALS}/bin/Rscript ${CMAKE_CURRENT_BINARY_DIR}/r_ismev_package
-  DEPENDEES install)
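The heavily escaped SET(TMP_STR*) / FILE(WRITE/APPEND) block above is a two-stage generator: it writes a helper script, visit.cmake, which the BuildVisItPatch_Step1 step then runs with cmake -P, and that script in turn appends VISIT_OPTION_DEFAULT lines to VisIt's host configuration file. Once the escapes resolve, the generated visit.cmake consists of lines of roughly this shape (the path is a placeholder):

    # One generated line of visit.cmake (sketch):
    FILE(APPEND /build/VisIt/visit-uvcdat-build.cmake
      "VISIT_OPTION_DEFAULT(VISIT_OSX_USE_RPATH TYPE BOOL ON)\n")
    # Running cmake -P visit.cmake therefore appends
    #   VISIT_OPTION_DEFAULT(VISIT_OSX_USE_RPATH TYPE BOOL ON)
    # to the visit-uvcdat-build.cmake host config.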
diff --git a/CMake/cdat_modules/visit_pkg.cmake b/CMake/cdat_modules/visit_pkg.cmake
deleted file mode 100644
index df8c7fab1644375ff1ffc41e216ddd117754ae01..0000000000000000000000000000000000000000
--- a/CMake/cdat_modules/visit_pkg.cmake
+++ /dev/null
@@ -1,10 +0,0 @@
-set(VISIT_MAJOR 2)
-set(VISIT_MINOR 6)
-set(VISIT_PATCH 0)
-set(VISIT_VERSION ${VISIT_MAJOR}.${VISIT_MINOR}.${VISIT_PATCH})
-set(VISIT_URL http://vis.lbl.gov/~visit)
-set(VISIT_GZ visit${VISIT_VERSION}.tar.gz)
-set(VISIT_MD5 cb7ff3e7d6e487a11786644a3b49331e )
-set(VISIT_SOURCE ${VISIT_URL}/${VISIT_GZ})
-
-add_cdat_package_dependent(VisIt "" "Build VisIt" OFF "CDAT_BUILD_GUI" OFF)
diff --git a/CMake/cdat_modules/vistrails_deps.cmake b/CMake/cdat_modules/vistrails_deps.cmake
deleted file mode 100644
index 98ae7150f0747af293412cdf06c25aafbcec4336..0000000000000000000000000000000000000000
--- a/CMake/cdat_modules/vistrails_deps.cmake
+++ /dev/null
@@ -1 +0,0 @@
-set(vistrails_deps ${python_pkg} ${cython_pkg} ${scipy_pkg})
diff --git a/CMake/cdat_modules/vistrails_external.cmake b/CMake/cdat_modules/vistrails_external.cmake
deleted file mode 100644
index ae8027c262e306e9a84eefd242be9fc70ae02321..0000000000000000000000000000000000000000
--- a/CMake/cdat_modules/vistrails_external.cmake
+++ /dev/null
@@ -1,92 +0,0 @@
-# Create an external project to clone vistrails,
-# and configure and build it
-
-include(GetGitRevisionDescription)
-set(vistrails_branch ${VISTRAILS_BRANCH})
-
-get_git_head_revision(refspec sha)
-
-string(REGEX REPLACE ".+/(.+)" "\\1" _branch "${refspec}")
-
-# Did we extract the branch?
-if (NOT _branch STREQUAL "${refspec}")
-    # Get the remote the branch is from
-    get_git_remote_for_branch(${_branch} _remote)
-
-    if (_remote)
-        git_remote_url(${_remote} _url)
-
-        if (_url)
-            if(_url MATCHES "^.*uvcdat.git")
-              if(_branch STREQUAL "master")
-                set(vistrails_branch ${VISTRAILS_BRANCH})
-              elseif(_branch STREQUAL "release")
-                set(vistrails_branch ${VISTRAILS_BRANCH})
-              endif()
-            elseif(_url MATCHES "^.*uvcdat-devel.git")
-              set(vistrails_branch uvcdat-next)
-            endif()
-        endif()
-    endif()
-else()
-    message(WARNING "Unable to branch from '${refspec}' using default VisTrails branch")
-endif()
-
-if("${refspec}" STREQUAL "refs/heads/devel-master")
-  set(vistrails_branch uvcdat-next)
-endif()
-
-message("[INFO] Using vistrails branch: ${vistrails_branch}")
-
-set(vistrails_tag_point_message "Specify branch of vistrails to be used for UVCDAT")
-set(VISTRAILS_TAG_POINT ${vistrails_branch} CACHE STRING "${vistrails_tag_point_message}")
-set(vistrails_url "${VISTRAILS_SOURCE}")
-
-if(CDAT_AUTO_UPDATE_VISTRAILS_TAG_POINT)
-  set(VISTRAILS_TAG_POINT ${vistrails_branch} CACHE STRING "${vistrails_tag_point_message}" FORCE)
-endif()
-
-# For configure purposes
-set(SOURCE_DIR "${CMAKE_INSTALL_PREFIX}/vistrails")
-set(BRANCH ${VISTRAILS_TAG_POINT})
-set(GIT_URL "${vistrails_url}")
-set(GIT_TARGET "vistrails")
-
-option(CDAT_DELETE_VISTRAILS_HISTORY "Delete GIT history of vistrails" OFF)
-option(CDAT_AUTO_UPDATE_VISTRAILS_TAG_POINT "Automatically update the vistrails tag point" ON)
-
-set(vistrails_install_command ${cdat_BINARY_DIR}/git_clone_vistrails.sh)
-if(EXISTS "${SOURCE_DIR}")
-  configure_file(
-    ${cdat_CMAKE_SOURCE_DIR}/cdat_modules_extra/git_update.sh.in
-    ${cdat_BINARY_DIR}/git_update_vistrails.sh
-    @ONLY
-  )
-  set(vistrails_install_command ${cdat_BINARY_DIR}/git_update_vistrails.sh)
-else()
-  configure_file(
-    ${cdat_CMAKE_SOURCE_DIR}/cdat_modules_extra/git_clone.sh.in
-    ${cdat_BINARY_DIR}/git_clone_vistrails.sh
-    @ONLY
-  )
-endif()
-
-ExternalProject_Add(vistrails
-  DOWNLOAD_DIR ${CDAT_PACKAGE_CACHE_DIR}
-  SOURCE_DIR ${CMAKE_INSTALL_PREFIX}
-  BUILD_IN_SOURCE 0
-  CONFIGURE_COMMAND ""
-  BUILD_COMMAND ""
-  INSTALL_COMMAND ${vistrails_install_command}
-  DEPENDS ${vistrails_DEPENDENCIES}
-  ${EP_LOG_OPTIONS}
-)
-
-if(CDAT_DELETE_VISTRAILS_HISTORY)
-  ExternalProject_Add_Step(vistrails after_install
-    COMMAND ${CMAKE_COMMAND} -E remove_directory ${CMAKE_INSTALL_PREFIX}/vistrails/.git
-    DEPENDEES install
-    WORKING_DIRECTORY ${CMAKE_INSTALL_PREFIX}/vistrails
-  )
-endif()
-
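The clone-or-update trick in vistrails_external.cmake generalizes: pick the install command by testing whether the source tree already exists, and generate the matching shell script with configure_file. A hedged sketch of that selection; the template file names here are assumptions, not the repository's actual templates:

    if(EXISTS "${CMAKE_INSTALL_PREFIX}/vistrails")
      configure_file(${CMAKE_CURRENT_SOURCE_DIR}/git_update.sh.in
                     ${CMAKE_CURRENT_BINARY_DIR}/git_update.sh @ONLY)
      set(_install_cmd ${CMAKE_CURRENT_BINARY_DIR}/git_update.sh)
    else()
      configure_file(${CMAKE_CURRENT_SOURCE_DIR}/git_clone.sh.in
                     ${CMAKE_CURRENT_BINARY_DIR}/git_clone.sh @ONLY)
      set(_install_cmd ${CMAKE_CURRENT_BINARY_DIR}/git_clone.sh)
    endif()
    # _install_cmd is then used as the ExternalProject INSTALL_COMMAND.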
diff --git a/CMake/cdat_modules/vistrails_pkg.cmake b/CMake/cdat_modules/vistrails_pkg.cmake
deleted file mode 100644
index de4704436d66c204677c87846a0ed19ef4586d0a..0000000000000000000000000000000000000000
--- a/CMake/cdat_modules/vistrails_pkg.cmake
+++ /dev/null
@@ -1,6 +0,0 @@
-set(VISTRAILS_VERSION ${VISTRAILS_TAG_POINT})
-set(VISTRAILS_SOURCE "${GIT_PROTOCOL}github.com/UV-CDAT/VisTrails.git")
-set(VISTRAILS_VERSION uvcdat-2.4.0)
-set(VISTRAILS_MD5)
-set(VISTRAILS_BRANCH uvcdat-2.4.0)
-add_cdat_package_dependent(vistrails "" "" ON "CDAT_BUILD_GUI" OFF)
diff --git a/CMake/cdat_modules/vtk_deps.cmake b/CMake/cdat_modules/vtk_deps.cmake
deleted file mode 100644
index 015636f1fdaeb871f354c8284bdefe76a4724808..0000000000000000000000000000000000000000
--- a/CMake/cdat_modules/vtk_deps.cmake
+++ /dev/null
@@ -1,13 +0,0 @@
-set(VTK_deps ${pkgconfig_pkg} ${python_pkg} ${tiff_pkg} ${hdf5_pkg} ${freetype_pkg} ${netcdfplus_pkg} ${netcdf_pkg} ${proj4_pkg})
-
-if (NOT CDAT_BUILD_GUI)
-  list(APPEND VTK_deps ${qt_pkg})
-endif()
-
-if(NOT CDAT_BUILD_LEAN)
-  list(APPEND VTK_deps ${ffmpeg_pkg})
-endif()
-
-if(CDAT_BUILD_OFFSCREEN)
-  list(APPEND VTK_deps ${osmesa_pkg})
-endif()
diff --git a/CMake/cdat_modules/vtk_external.cmake b/CMake/cdat_modules/vtk_external.cmake
deleted file mode 100644
index 8e926a66c7ac1a62e67b7dec3528f0989a8aa307..0000000000000000000000000000000000000000
--- a/CMake/cdat_modules/vtk_external.cmake
+++ /dev/null
@@ -1,184 +0,0 @@
-set(vtk_source "${CMAKE_CURRENT_BINARY_DIR}/build/VTK")
-set(vtk_binary "${CMAKE_CURRENT_BINARY_DIR}/build/VTK-build")
-set(vtk_install "${cdat_EXTERNALS}")
-
-set(GIT_CMD_STR GIT_REPOSITORY "${VTK_SOURCE}")
-
-set(_vtk_modules
-  vtkCommonComputationalGeometry
-  vtkCommonCore
-  vtkCommonExecutionModel
-  vtkCommonMisc
-  vtkCommonSystem
-  vtkCommonTransforms
-  vtkFiltersAMR
-  vtkFiltersCore
-  vtkFiltersExtraction
-  vtkFiltersFlowPaths
-  vtkFiltersGeneral
-  vtkFiltersGeneric
-  vtkFiltersGeometry
-  vtkFiltersHybrid
-  vtkFiltersImaging
-  vtkFiltersModeling
-  vtkFiltersSelection
-  vtkFiltersSMP
-  vtkFiltersSources
-  vtkFiltersStatistics
-  vtkFiltersTexture
-  vtkGeovisCore
-  vtkImagingColor
-  vtkImagingCore
-  vtkImagingGeneral
-  vtkImagingMath
-  vtkImagingSources
-  vtkImagingStencil
-  vtkInteractionImage
-  vtkInteractionStyle
-  vtkInteractionWidgets
-  vtkIOCore
-  vtkIOExport
-  vtkIOExportOpenGL
-  vtkIOGeometry
-  vtkIOImage
-  vtkIOImport
-  vtkRenderingCore
-  vtkRenderingFreeType
-  vtkRenderingFreeTypeOpenGL
-  vtkRenderingImage
-  vtkRenderingLabel
-  vtkRenderingOpenGL
-  vtkRenderingVolume
-  vtkRenderingVolumeOpenGL
-  vtkViewsCore
-  vtkViewsGeovis
-)
-
-if(NOT CDAT_BUILD_LEAN)
-  list(APPEND _vtk_modules "vtkIOFFMPEG")
-endif()
-
-# Use either cdat's zlib and libxml2 or the system versions
-list(APPEND vtk_build_args
-  -DVTK_USE_SYSTEM_ZLIB:BOOL=ON
-  -DVTK_USE_SYSTEM_LIBXML2:BOOL=ON
-  -DVTK_USE_SYSTEM_HDF5:BOOL=ON
-  -DVTK_USE_SYSTEM_NETCDF:BOOL=ON
-  -DVTK_USE_SYSTEM_FREETYPE:BOOL=ON
-  -DVTK_USE_SYSTEM_LIBPROJ4:BOOL=ON
-  -DVTK_RENDERING_BACKEND:STRING=OpenGL
-  -DLIBPROJ4_INCLUDE_DIR:PATH=${cdat_EXTERNALS}/proj4/include
-  -DLIBPROJ4_LIBRARIES:FILEPATH=${cdat_EXTERNALS}/proj4/lib/libproj${_LINK_LIBRARY_SUFFIX}
-#
-)
-if (APPLE)
-  list(APPEND vtk_build_args
-    -DVTK_USE_SYSTEM_PNG:BOOL=OFF
-    -DVTK_USE_SYSTEM_JPEG:BOOL=OFF
-    )
-else()
-  list(APPEND vtk_build_args
-    -DVTK_USE_SYSTEM_PNG:BOOL=ON
-    -DVTK_USE_SYSTEM_JPEG:BOOL=ON
-  )
-endif()
-
-# Turn off testing and other non-essential features
-list(APPEND vtk_build_args
-  -DBUILD_TESTING:BOOL=OFF
-  -DCMAKE_PREFIX_PATH:PATH=${cdat_EXTERNALS}
-)
-
-# Use cdat zlib
-#if(NOT CDAT_USE_SYSTEM_ZLIB)
-#  list(APPEND vtk_build_args
-#    -DZLIB_INCLUDE_DIR:PATH=${cdat_EXTERNALS}/include
-#       -DZLIB_LIBRARY:FILEPATH=${cdat_EXTERNALS}/lib/libz${_LINK_LIBRARY_SUFFIX}
-#  )
-#endif()
-
-# Use cdat libxml
-#if(NOT CDAT_USE_SYSTEM_LIBXML2)
-#  list(APPEND vtk_build_args
-#    -DLIBXML2_INCLUDE_DIR:PATH=${cdat_EXTERNALS}/include/libxml2
-#    -DLIBXML2_LIBRARIES:FILEPATH=${cdat_EXTERNALS}/lib/libxml2${_LINK_LIBRARY_SUFFIX}
-#    -DLIBXML2_XMLLINT_EXECUTABLE:FILEPATH=${cdat_EXTERNALS}/bin/xmllint
-#  )
-#endif()
-
-# Use cdat hdf5
-if(NOT CDAT_USE_SYSTEM_HDF5)
-  list(APPEND vtk_build_args
-    -DHDF5_DIR:PATH=${cdat_EXTERNALS}/
-    -DHDF5_C_INCLUDE_DIR:PATH=${cdat_EXTERNALS}/include
-    -DHDF5_INCLUDE_DIR:PATH=${cdat_EXTERNALS}/include
-    -DHDF5_LIBRARY:FILEPATH=${cdat_EXTERNALS}/lib/libhdf5${_LINK_LIBRARY_SUFFIX}
-    -DHDF5_hdf5_LIBRARY:FILEPATH=${cdat_EXTERNALS}/lib/libhdf5${_LINK_LIBRARY_SUFFIX}
-    -DHDF5_hdf5_LIBRARY_RELEASE:FILEPATH=${cdat_EXTERNALS}/lib/libhdf5${_LINK_LIBRARY_SUFFIX}
-  )
-
-#  if(NOT CDAT_USE_SYSTEM_ZLIB)
-#    list(APPEND vtk_build_args
-#      -DHDF5_z_LIBRARY:FILEPATH=${cdat_EXTERNALS}/lib/libz${_LINK_LIBRARY_SUFFIX}
-#      -DHDF5_z_LIBRARY_RELEASE:FILEPATH=${cdat_EXTERNALS}/lib/libz${_LINK_LIBRARY_SUFFIX}
-#    )
-#  endif()
-endif()
-
-if(CDAT_BUILD_OFFSCREEN)
-  list(APPEND vtk_build_args
-    "-DVTK_USE_X:BOOL=OFF"
-    "-DVTK_OPENGL_HAS_OSMESA:BOOL=ON"
-    "-DOPENGL_INCLUDE_DIR:PATH=${cdat_EXTERNALS}/include"
-    "-DOPENGL_gl_LIBRARY:FILEPATH=${cdat_EXTERNALS}/lib/libOSMesa${_LINK_LIBRARY_SUFFIX}"
-    "-DOPENGL_glu_LIBRARY:FILEPATH=${cdat_EXTERNALS}/lib/libGLU${_LINK_LIBRARY_SUFFIX}"
-    "-DOSMESA_INCLUDE_DIR:PATH=${cdat_EXTERNALS}/include"
-    "-DOSMESA_LIBRARY:FILEPATH=${cdat_EXTERNALS}/lib/libOSMesa${_LINK_LIBRARY_SUFFIX}"
-  )
-endif()
-
-if(CDAT_BUILD_WEB)
-  list(APPEND vtk_build_args
-    "-DVTK_Group_Web:BOOL=ON"
-  )
-endif()
-
-set(_vtk_module_options)
-foreach(_module ${_vtk_modules})
-  list(APPEND _vtk_module_options "-DModule_${_module}:BOOL=ON")
-endforeach()
-
-ExternalProject_Add(VTK
-  DOWNLOAD_DIR ${CDAT_PACKAGE_CACHE_DIR}
-  SOURCE_DIR ${vtk_source}
-  BINARY_DIR ${vtk_binary}
-  INSTALL_DIR ${vtk_install}
-  ${GIT_CMD_STR}
-  GIT_TAG ${VTK_BRANCH}
-  UPDATE_COMMAND ""
-  PATCH_COMMAND ""
-  CMAKE_CACHE_ARGS
-    -DBUILD_SHARED_LIBS:BOOL=ON
-    -DBUILD_TESTING:BOOL=OFF
-    -DCMAKE_CXX_FLAGS:STRING=${cdat_tpl_cxx_flags}
-    -DCMAKE_C_FLAGS:STRING=${cdat_tpl_c_flags}
-    -DCMAKE_BUILD_TYPE:STRING=${CMAKE_CFG_INTDIR}
-    ${cdat_compiler_args}
-    ${vtk_build_args}
-    -DVTK_WRAP_PYTHON:BOOL=ON
-    -DPYTHON_EXECUTABLE:FILEPATH=${PYTHON_EXECUTABLE}
-    -DPYTHON_INCLUDE_DIR:PATH=${PYTHON_INCLUDE}
-    -DPYTHON_LIBRARY:FILEPATH=${PYTHON_LIBRARY}
-    -DPYTHON_MAJOR_VERSION:STRING=${PYTHON_MAJOR}
-    -DPYTHON_MINOR_VERSION:STRING=${PYTHON_MINOR}
-    -DVTK_Group_Rendering:BOOL=OFF
-    -DVTK_Group_StandAlone:BOOL=OFF
-    -DVTK_LEGACY_SILENT:BOOL=ON
-    ${_vtk_module_options}
-  CMAKE_ARGS
-    -DCMAKE_INSTALL_PREFIX:PATH=<INSTALL_DIR>
-  DEPENDS ${VTK_deps}
-  ${ep_log_options}
-)
-
-unset(GIT_CMD_STR)
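The module list above is turned into cache arguments mechanically; the same few lines work for any -DModule_<name> style option set. In isolation (with an illustrative subset of modules):

    set(_vtk_modules vtkCommonCore vtkRenderingOpenGL)  # illustrative subset
    set(_vtk_module_options)
    foreach(_module ${_vtk_modules})
      list(APPEND _vtk_module_options "-DModule_${_module}:BOOL=ON")
    endforeach()
    # _vtk_module_options is then spliced into CMAKE_CACHE_ARGS, as above.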
diff --git a/CMake/cdat_modules/vtk_pkg.cmake b/CMake/cdat_modules/vtk_pkg.cmake
deleted file mode 100644
index 35504cbac69784659216e7cf9d50f10681305ad3..0000000000000000000000000000000000000000
--- a/CMake/cdat_modules/vtk_pkg.cmake
+++ /dev/null
@@ -1,4 +0,0 @@
-set(VTK_SOURCE ${GIT_PROTOCOL}github.com/UV-CDAT/VTK.git )
-set(VTK_MD5)
-set(VTK_BRANCH uvcdat-master)
-add_cdat_package_dependent(VTK "" "" ON "CDAT_BUILD_GRAPHICS" OFF)
diff --git a/CMake/cdat_modules/wget_deps.cmake b/CMake/cdat_modules/wget_deps.cmake
deleted file mode 100644
index 5c04065310fd2b58c2093466c6261eea917b72da..0000000000000000000000000000000000000000
--- a/CMake/cdat_modules/wget_deps.cmake
+++ /dev/null
@@ -1 +0,0 @@
-set(wget_deps)
diff --git a/CMake/cdat_modules/wget_external.cmake b/CMake/cdat_modules/wget_external.cmake
deleted file mode 100644
index 157c00038636900871d7ac2d1b86ee59c52b6daa..0000000000000000000000000000000000000000
--- a/CMake/cdat_modules/wget_external.cmake
+++ /dev/null
@@ -1,16 +0,0 @@
-set(wget_source "${CMAKE_CURRENT_BINARY_DIR}/build/wget")
-set(wget_install "${cdat_EXTERNALS}")
-
-ExternalProject_Add(Wget
-  DOWNLOAD_DIR ${CDAT_PACKAGE_CACHE_DIR}
-  SOURCE_DIR ${wget_source}
-  INSTALL_DIR ${wget_install}
-  URL ${WGET_URL}/${WGET_GZ}
-  URL_MD5 ${WGET_MD5}
-  BUILD_IN_SOURCE 1
-  PATCH_COMMAND ""
-  CONFIGURE_COMMAND ${CMAKE_COMMAND} -DINSTALL_DIR=<INSTALL_DIR> -DWORKING_DIR=<SOURCE_DIR> -P ${cdat_CMAKE_BINARY_DIR}/cdat_configure_step.cmake
-  DEPENDS ${wget_deps}
-  ${ep_log_options}
-)
-
diff --git a/CMake/cdat_modules/wget_pkg.cmake b/CMake/cdat_modules/wget_pkg.cmake
deleted file mode 100644
index 879dfc87d6bd44b25e51e64f32176323079c6ac2..0000000000000000000000000000000000000000
--- a/CMake/cdat_modules/wget_pkg.cmake
+++ /dev/null
@@ -1,28 +0,0 @@
-set(LLNL_URL http://uv-cdat.llnl.gov/cdat/resources)
-set(WGET_MAJOR 1)
-set(WGET_MINOR 12)
-set(WGET_PATCH)
-set(WGET_URL ${LLNL_URL})
-set(WGET_GZ wget-${WGET_MAJOR}.${WGET_MINOR}.tar.gz)
-set(WGET_MD5 141461b9c04e454dc8933c9d1f2abf83)
-set(WGET_SOURCE ${WGET_URL}/${WGET_GZ})
-
-add_cdat_package(Wget "" "Build Wget" SYSTEM)
-
-set (nm WGET)
-string(TOUPPER ${nm} uc_nm)
-set(${uc_nm}_VERSION ${${nm}_MAJOR}.${${nm}_MINOR})
-if(CDAT_BUILD_WGET)
-  if(WIN32)
-    set(WGET_EXECUTABLE ${cdat_EXTERNALS}/bin/wget.exe)
-  else()
-    set(WGET_EXECUTABLE ${cdat_EXTERNALS}/bin/wget)
-  endif()
-endif()
-if ("${WGET_EXECUTABLE}" STREQUAL "WGET_EXECUTABLE-NOTFOUND")
-    set(WGET_EXECUTABLE ${cdat_EXTERNALS}/bin/wget)
-endif()
-message("[INFO] WGET_EXECUTABLE is set to ${WGET_EXECUTABLE}")
-
-set(HASWGET ${WGET_EXECUTABLE})
-
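The nm/uc_nm dance in wget_pkg.cmake is ordinary CMake variable indirection: ${${nm}_MAJOR} expands nm first, then the composed variable name. A self-contained sketch of the same idiom:

    set(WGET_MAJOR 1)
    set(WGET_MINOR 12)
    set(nm WGET)
    string(TOUPPER ${nm} uc_nm)                         # -> "WGET"
    set(${uc_nm}_VERSION ${${nm}_MAJOR}.${${nm}_MINOR}) # WGET_VERSION
    message(STATUS "version: ${WGET_VERSION}")          # prints 1.12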
diff --git a/CMake/cdat_modules/windfield_deps.cmake b/CMake/cdat_modules/windfield_deps.cmake
deleted file mode 100644
index bef69919b8127e0a27cafd1767327458339485a0..0000000000000000000000000000000000000000
--- a/CMake/cdat_modules/windfield_deps.cmake
+++ /dev/null
@@ -1 +0,0 @@
-set(windfield_deps ${cdat_pkg})
diff --git a/CMake/cdat_modules/windfield_external.cmake b/CMake/cdat_modules/windfield_external.cmake
deleted file mode 100644
index 0be2b03c2087a80978f97e2ce40fb4fe67be9d9b..0000000000000000000000000000000000000000
--- a/CMake/cdat_modules/windfield_external.cmake
+++ /dev/null
@@ -1,16 +0,0 @@
-# Windfield
-#
-set(windfield_source "${CMAKE_CURRENT_BINARY_DIR}/build/windfield")
-
-ExternalProject_Add(windfield
-  DOWNLOAD_DIR ${CDAT_PACKAGE_CACHE_DIR}
-  SOURCE_DIR ${windfield_source}
-  URL ${windfield_URL}/${windfield_GZ}
-  URL_MD5 ${windfield_MD5}
-  BUILD_IN_SOURCE 1
-  CONFIGURE_COMMAND ""
-  BUILD_COMMAND ${PYTHON_EXECUTABLE} setup.py build
-  INSTALL_COMMAND ${PYTHON_EXECUTABLE} setup.py install ${PYTHON_EXTRA_PREFIX}
-  DEPENDS ${windfield_deps}
-  ${ep_log_options}
-)
diff --git a/CMake/cdat_modules/windfield_pkg.cmake b/CMake/cdat_modules/windfield_pkg.cmake
deleted file mode 100644
index 1296543c65e095571ad1f204c0dc4891eb957a45..0000000000000000000000000000000000000000
--- a/CMake/cdat_modules/windfield_pkg.cmake
+++ /dev/null
@@ -1,9 +0,0 @@
-set(windfield_MAJOR )
-set(windfield_MINOR )
-set(windfield_VERSION 547534c636efc)
-set(windfield_URL ${LLNL_URL} )
-set(windfield_GZ windfield-${windfield_VERSION}.tar.bz2)
-set(windfield_MD5 48989935760da881424b6adb2cb96f44 )
-set(windfield_SOURCE ${windfield_URL}/${windfield_GZ})
-
-add_cdat_package_dependent(windfield "" "" OFF "CDAT_BUILD_LEAN" ON)
diff --git a/CMake/cdat_modules/windspharm_deps.cmake b/CMake/cdat_modules/windspharm_deps.cmake
deleted file mode 100644
index a6a45a3a97a31f7022ebe3709bf2b03b26909f5e..0000000000000000000000000000000000000000
--- a/CMake/cdat_modules/windspharm_deps.cmake
+++ /dev/null
@@ -1 +0,0 @@
-set(windspharm_deps ${cdat_pkg})
diff --git a/CMake/cdat_modules/windspharm_external.cmake b/CMake/cdat_modules/windspharm_external.cmake
deleted file mode 100644
index a1c93750e584e28a93a0f100bf13134bd69dc6c1..0000000000000000000000000000000000000000
--- a/CMake/cdat_modules/windspharm_external.cmake
+++ /dev/null
@@ -1,16 +0,0 @@
-# windspharm
-#
-set(windspharm_source "${CMAKE_CURRENT_BINARY_DIR}/build/windspharm")
-
-ExternalProject_Add(windspharm
-  DOWNLOAD_DIR ${CDAT_PACKAGE_CACHE_DIR}
-  SOURCE_DIR ${windspharm_source}
-  URL ${windspharm_URL}/${windspharm_GZ}
-  URL_MD5 ${windspharm_MD5}
-  BUILD_IN_SOURCE 1
-  CONFIGURE_COMMAND ""
-  BUILD_COMMAND ${PYTHON_EXECUTABLE} setup.py build
-  INSTALL_COMMAND env "PYTHONPATH=$ENV{PYTHONPATH}" "${PYTHON_EXECUTABLE}" setup.py install "${PYTHON_EXTRA_PREFIX}"
-  DEPENDS ${windspharm_deps}
-  ${ep_log_options}
-)
diff --git a/CMake/cdat_modules/windspharm_pkg.cmake b/CMake/cdat_modules/windspharm_pkg.cmake
deleted file mode 100644
index 4293b1a1c66dc382734481eebb54bebb91ce4fb4..0000000000000000000000000000000000000000
--- a/CMake/cdat_modules/windspharm_pkg.cmake
+++ /dev/null
@@ -1,9 +0,0 @@
-set(windspharm_MAJOR )
-set(windspharm_MINOR )
-set(windspharm_VERSION 76a47fca1a)
-set(windspharm_URL ${LLNL_URL} )
-set(windspharm_GZ windspharm-${windspharm_VERSION}.zip)
-set(windspharm_MD5 8456da340724d332955f2ec946204cad)
-set(windspharm_SOURCE ${windspharm_URL}/${windspharm_GZ})
-
-add_cdat_package_dependent(windspharm "" "" OFF "CDAT_BUILD_LEAN" ON)
diff --git a/CMake/cdat_modules/x264_deps.cmake b/CMake/cdat_modules/x264_deps.cmake
deleted file mode 100644
index c4169909e263a91f960fedb3a7278bd5c89137ff..0000000000000000000000000000000000000000
--- a/CMake/cdat_modules/x264_deps.cmake
+++ /dev/null
@@ -1,2 +0,0 @@
-# Not necessary in theory, but fixes race condition that was being experienced on Ubuntu
-set(X264_deps ${pkgconfig_pkg})
diff --git a/CMake/cdat_modules/x264_external.cmake b/CMake/cdat_modules/x264_external.cmake
deleted file mode 100644
index ad75bd4b934457034db8d377107c3e3a7686ba43..0000000000000000000000000000000000000000
--- a/CMake/cdat_modules/x264_external.cmake
+++ /dev/null
@@ -1,28 +0,0 @@
-# The X264 external project for ParaView
-set(x264_source "${CMAKE_CURRENT_BINARY_DIR}/build/X264")
-set(x264_install "${cdat_EXTERNALS}")
-set(ENV{PATH} $ENV{PATH}:${cdat_EXTERNALS}/bin)
-
-find_program(YASM_BIN "yasm")
-
-if (NOT YASM_BIN)
-  set(x264_conf_args --disable-asm^^--enable-shared)
-else()
-  set(x264_conf_args --enable-shared)
-endif()
-
-ExternalProject_Add(X264
-  LIST_SEPARATOR ^^
-  DOWNLOAD_DIR ${CDAT_PACKAGE_CACHE_DIR}
-  SOURCE_DIR ${x264_source}
-  INSTALL_DIR ${x264_install}
-  URL ${X264_URL}/${X264_GZ}
-  URL_MD5 ${X264_MD5}
-  BUILD_IN_SOURCE 1
-  PATCH_COMMAND ""
-  CONFIGURE_COMMAND ${CMAKE_COMMAND} -DINSTALL_DIR=<INSTALL_DIR> -DWORKING_DIR=<SOURCE_DIR> -DCONFIGURE_ARGS=${x264_conf_args} -DBASH_CONFIGURE=ON -P ${cdat_CMAKE_BINARY_DIR}/cdat_configure_step.cmake
-  DEPENDS ${X264_deps}
-  ${ep_log_options}
-  )
-
-set(X264_INCLUDE_DIR ${x264_install}/include)
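x264_external.cmake shows how to pass a list through ExternalProject: CMake lists are ";"-separated, and ExternalProject would split the configure arguments at every ";", so the arguments are joined with "^^" and LIST_SEPARATOR tells ExternalProject to re-split on that token instead. A sketch with a placeholder project and URL:

    set(_conf_args --disable-asm^^--enable-shared)
    ExternalProject_Add(foo
      LIST_SEPARATOR ^^
      URL http://example.com/foo.tar.gz            # illustrative URL
      CONFIGURE_COMMAND <SOURCE_DIR>/configure ${_conf_args}
      BUILD_COMMAND make
      INSTALL_COMMAND make install)
    # configure receives two arguments: --disable-asm --enable-shared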
diff --git a/CMake/cdat_modules/x264_pkg.cmake b/CMake/cdat_modules/x264_pkg.cmake
deleted file mode 100644
index ba832b026ae593987a7f05b463ef5a5ec71ec2d4..0000000000000000000000000000000000000000
--- a/CMake/cdat_modules/x264_pkg.cmake
+++ /dev/null
@@ -1,13 +0,0 @@
-set(X264_DATE 20151006)
-set(X264_TIME 2245)
-set(X264_ADDENDUM "")
-set(X264_URL ${LLNL_URL})
-set(X264_GZ x264-snapshot-${X264_DATE}-${X264_TIME}${X264_ADDENDUM}.tar.gz)
-set(X264_MD5 e8f5a0fc8db878bcdd256715472fe379)
-
-set (nm X264)
-string(TOUPPER ${nm} uc_nm)
-set(${uc_nm}_VERSION ${${nm}_DATE}.${${nm}_TIME})
-set(X264_SOURCE ${X264_URL}/${X264_GZ})
-
-add_cdat_package_dependent(X264 "" "" ON "CDAT_BUILD_GRAPHICS" OFF)
diff --git a/CMake/cdat_modules/xgks_external.cmake b/CMake/cdat_modules/xgks_external.cmake
deleted file mode 100644
index 1dcf2229701439d607cedd55b152084ccd1e13da..0000000000000000000000000000000000000000
--- a/CMake/cdat_modules/xgks_external.cmake
+++ /dev/null
@@ -1,21 +0,0 @@
-
-set(xgks_source "${CMAKE_CURRENT_BINARY_DIR}/build/xgks")
-set(xgks_install "${cdat_EXTERNALS}")
-
-configure_file(${cdat_CMAKE_SOURCE_DIR}/cdat_modules_extra/xgks_configure_step.cmake.in
-  ${cdat_CMAKE_BINARY_DIR}/xgks_configure_step.cmake
-  @ONLY)
-
-#cp -f build/xgks*/port/misc/udposix.h /home/partyd/Projects/uv-cdat/make-install/Externals/include
-
-ExternalProject_Add(xgks
-  DOWNLOAD_DIR ${CDAT_PACKAGE_CACHE_DIR}
-  SOURCE_DIR ${xgks_source}
-  INSTALL_DIR ${xgks_install}
-  URL ${XGKS_URL}/${XGKS_GZ}
-  URL_MD5 ${XGKS_MD5}
-  BUILD_IN_SOURCE 1
-  PATCH_COMMAND ""
-  CONFIGURE_COMMAND ${CMAKE_COMMAND} -DINSTALL_DIR=<INSTALL_DIR> -DWORKING_DIR=<SOURCE_DIR> -P ${cdat_CMAKE_BINARY_DIR}/xgks_configure_step.cmake
-  ${ep_log_options}
-)
diff --git a/CMake/cdat_modules/yasm_deps.cmake b/CMake/cdat_modules/yasm_deps.cmake
deleted file mode 100644
index 86ac65b48bf9df3c952b6ff5a47bd3118e376e71..0000000000000000000000000000000000000000
--- a/CMake/cdat_modules/yasm_deps.cmake
+++ /dev/null
@@ -1 +0,0 @@
-set(YASM_deps ${pkgconfig_pkg})
diff --git a/CMake/cdat_modules/yasm_external.cmake b/CMake/cdat_modules/yasm_external.cmake
deleted file mode 100644
index 9c1744b2d740d4a1ef7d75c23a17fce99d17d810..0000000000000000000000000000000000000000
--- a/CMake/cdat_modules/yasm_external.cmake
+++ /dev/null
@@ -1,15 +0,0 @@
-set(YASM_source "${CMAKE_CURRENT_BINARY_DIR}/build/YASM")
-set(YASM_install "${cdat_EXTERNALS}")
-
-ExternalProject_Add(YASM
-  DOWNLOAD_DIR ${CDAT_PACKAGE_CACHE_DIR}
-  SOURCE_DIR ${YASM_source}
-  INSTALL_DIR ${YASM_install}
-  URL ${YASM_URL}/${YASM_GZ}
-  URL_MD5 ${YASM_MD5}
-  BUILD_IN_SOURCE 1
-  PATCH_COMMAND ""
-  CONFIGURE_COMMAND ${CMAKE_COMMAND}  -DINSTALL_DIR=<INSTALL_DIR> -DWORKING_DIR=<SOURCE_DIR> -P ${cdat_CMAKE_BINARY_DIR}/cdat_configure_step.cmake
-  DEPENDS ${YASM_deps}
-  ${ep_log_options}
-)
diff --git a/CMake/cdat_modules/yasm_pkg.cmake b/CMake/cdat_modules/yasm_pkg.cmake
deleted file mode 100644
index d4669fe883d6107c7e6d7a39f237c059e23f1729..0000000000000000000000000000000000000000
--- a/CMake/cdat_modules/yasm_pkg.cmake
+++ /dev/null
@@ -1,13 +0,0 @@
-set(YASM_MAJOR_SRC 1)
-set(YASM_MINOR_SRC 2)
-set(YASM_PATCH_SRC 0)
-set(YASM_URL ${LLNL_URL})
-set(YASM_GZ yasm-${YASM_MAJOR_SRC}.${YASM_MINOR_SRC}.${YASM_PATCH_SRC}.tar.gz)
-set(YASM_MD5 4cfc0686cf5350dd1305c4d905eb55a6)
-set(YASM_SOURCE ${YASM_URL}/${YASM_GZ})
-
-set (nm YASM)
-string(TOUPPER ${nm} uc_nm)
-set(${uc_nm}_VERSION ${${nm}_MAJOR_SRC}.${${nm}_MINOR_SRC}.${${nm}_PATCH_SRC})
-add_cdat_package(YASM "" "" OFF)
-
diff --git a/CMake/cdat_modules/zlib_deps.cmake b/CMake/cdat_modules/zlib_deps.cmake
deleted file mode 100644
index 3f2626fb6b02a11c129a2560229639c3ac72fd13..0000000000000000000000000000000000000000
--- a/CMake/cdat_modules/zlib_deps.cmake
+++ /dev/null
@@ -1 +0,0 @@
-set(zlib_deps ${pkgconfig_pkg})
diff --git a/CMake/cdat_modules/zlib_external.cmake b/CMake/cdat_modules/zlib_external.cmake
deleted file mode 100644
index 09b6fd533b7e774d8f0f4eb88027051834280ad5..0000000000000000000000000000000000000000
--- a/CMake/cdat_modules/zlib_external.cmake
+++ /dev/null
@@ -1,55 +0,0 @@
-
-# On Windows we use CMake; otherwise ./configure
-if(WIN32)
-
-  set(zlib_source "${CMAKE_CURRENT_BINARY_DIR}/zlib")
-  set(zlib_binary "${CMAKE_CURRENT_BINARY_DIR}/zlib-build")
-  set(zlib_install "${cdat_EXTERNALS}")
-
-  ExternalProject_Add(zlib
-    DOWNLOAD_DIR ${CDAT_PACKAGE_CACHE_DIR}
-    SOURCE_DIR ${zlib_source}
-    BINARY_DIR ${zlib_binary}
-    INSTALL_DIR ${zlib_install}
-    URL ${ZLIB_URL}/${ZLIB_GZ}
-    URL_MD5 ${ZLIB_MD5}
-    PATCH_COMMAND ${CMAKE_COMMAND} -E remove <SOURCE_DIR>/zconf.h
-    CMAKE_CACHE_ARGS
-      -DCMAKE_CXX_FLAGS:STRING=${pv_tpl_cxx_flags}
-      -DCMAKE_C_FLAGS:STRING=${pv_tpl_c_flags}
-      -DCMAKE_BUILD_TYPE:STRING=${CMAKE_CFG_INTDIR}
-      ${pv_tpl_compiler_args}
-      ${zlib_EXTRA_ARGS}
-    CMAKE_ARGS
-      -DCMAKE_INSTALL_PREFIX:PATH=<INSTALL_DIR>
-    ${ep_log_options}
-  )
-
-else()
-
-  set(zlib_source "${CMAKE_CURRENT_BINARY_DIR}/build/zlib")
-  set(zlib_install "${cdat_EXTERNALS}")
-  set(CONFIGURE_ARGS --shared)
-
-  ExternalProject_Add(zlib
-    DOWNLOAD_DIR ${CDAT_PACKAGE_CACHE_DIR}
-    SOURCE_DIR ${zlib_source}
-    INSTALL_DIR ${zlib_install}
-    URL ${ZLIB_URL}/${ZLIB_GZ}
-    URL_MD5 ${ZLIB_MD5}
-    PATCH_COMMAND ${CMAKE_COMMAND} -E remove <SOURCE_DIR>/zconf.h
-    BUILD_IN_SOURCE 1
-    CONFIGURE_COMMAND ${CMAKE_COMMAND} -DCONFIGURE_ARGS=${CONFIGURE_ARGS} -DINSTALL_DIR=<INSTALL_DIR> -DWORKING_DIR=<SOURCE_DIR> -P ${cdat_CMAKE_BINARY_DIR}/cleanenv_configure_step.cmake
-    DEPENDS ${zlib_deps}
-    ${ep_log_options}
-  )
-
-endif()
-
-set(ZLIB_INCLUDE_DIR ${zlib_install}/include)
-
-if(WIN32)
-  set(ZLIB_LIBRARY optimized ${zlib_install}/lib/zlib${_LINK_LIBRARY_SUFFIX} debug ${zlib_install}/lib/zlibd${_LINK_LIBRARY_SUFFIX})
-else()
-  set(ZLIB_LIBRARY ${ZLIB_LIBRARY_PATH}/libz${_LINK_LIBRARY_SUFFIX})
-endif()
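The optimized/debug pair used for ZLIB_LIBRARY on Windows is a standard CMake convention: target_link_libraries picks the entry matching the active build configuration. A sketch with illustrative paths:

    set(ZLIB_LIBRARY
      optimized C:/externals/lib/zlib.lib    # used for Release-like builds
      debug     C:/externals/lib/zlibd.lib)  # used for Debug builds
    # later: target_link_libraries(mytarget ${ZLIB_LIBRARY})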
diff --git a/CMake/cdat_modules/zlib_pkg.cmake b/CMake/cdat_modules/zlib_pkg.cmake
deleted file mode 100644
index a34c30885eb87b19090918e10c2dc452ba4e071e..0000000000000000000000000000000000000000
--- a/CMake/cdat_modules/zlib_pkg.cmake
+++ /dev/null
@@ -1,24 +0,0 @@
-set(ZLIB_PATCH_SRC 5 CACHE STRING "Patch level of zlib 1.2.x to use")
-MARK_AS_ADVANCED(ZLIB_PATCH_SRC)
-if (ZLIB_PATCH_SRC EQUAL 3)
-  set(ZLIB_MD5 debc62758716a169df9f62e6ab2bc634)
-elseif (ZLIB_PATCH_SRC EQUAL 5)
-  set(ZLIB_MD5 c735eab2d659a96e5a594c9e8541ad63)
-elseif (ZLIB_PATCH_SRC EQUAL 7)
-  set(ZLIB_MD5 60df6a37c56e7c1366cca812414f7b85)
-elseif (ZLIB_PATCH_SRC EQUAL 8)
-  set(ZLIB_MD5 44d667c142d7cda120332623eab69f40)
-else ()
-  message(FATAL_ERROR "error: invalid zlib patch number: '${ZLIB_PATCH_SRC}' valid: 3, 5, 7 or 8")
-endif()
-
-set(ZLIB_MAJOR_SRC 1)
-set(ZLIB_MINOR_SRC 2)
-# ZLIB_PATCH_SRC and its md5 are configured in CMakeLists.txt because on some RedHat systems we need to change them; I don't believe this is true anymore (durack1, 23 Nov 2014)
-#set(ZLIB_PATCH_SRC 8)
-set(ZLIB_VERSION ${ZLIB_MAJOR_SRC}.${ZLIB_MINOR_SRC}.${ZLIB_PATCH_SRC})
-set(ZLIB_URL ${LLNL_URL})
-set(ZLIB_GZ zlib-${ZLIB_VERSION}.tar.gz)
-set(ZLIB_SOURCE ${ZLIB_URL}/${ZLIB_GZ})
-
-add_cdat_package(zlib "" "" OFF)
diff --git a/CMake/cdat_modules/zmq_deps.cmake b/CMake/cdat_modules/zmq_deps.cmake
deleted file mode 100644
index 1ef560a28fc549b3c801c2a694ea564fb9ad528d..0000000000000000000000000000000000000000
--- a/CMake/cdat_modules/zmq_deps.cmake
+++ /dev/null
@@ -1 +0,0 @@
-set(ZMQ_deps ${sphinx_pkg} ${pip_pkg})
diff --git a/CMake/cdat_modules/zmq_external.cmake b/CMake/cdat_modules/zmq_external.cmake
deleted file mode 100644
index c4637b4b46392ef5ff3177c18617b6ba50d1a0bf..0000000000000000000000000000000000000000
--- a/CMake/cdat_modules/zmq_external.cmake
+++ /dev/null
@@ -1,16 +0,0 @@
-
-set(zmq_source "${CMAKE_CURRENT_BINARY_DIR}/build/ZMQ")
-set(zmq_install "${cdat_EXTERNALS}")
-
-ExternalProject_Add(ZMQ
-  DOWNLOAD_DIR ${CDAT_PACKAGE_CACHE_DIR}
-  SOURCE_DIR ${zmq_source}
-  INSTALL_DIR ${zmq_install}
-  URL ${ZMQ_URL}/${ZMQ_GZ}
-  URL_MD5 ${ZMQ_MD5}
-  BUILD_IN_SOURCE 1
-  PATCH_COMMAND ""
-  CONFIGURE_COMMAND ${CMAKE_COMMAND} -DINSTALL_DIR=<INSTALL_DIR> -DWORKING_DIR=<SOURCE_DIR> -P ${cdat_CMAKE_BINARY_DIR}/cdat_configure_step.cmake
-  DEPENDS ${ZMQ_deps}
-  ${ep_log_options}
-)
diff --git a/CMake/cdat_modules/zmq_pkg.cmake b/CMake/cdat_modules/zmq_pkg.cmake
deleted file mode 100644
index 26776a9f93378cf1cafdcfbb77bc634f65258857..0000000000000000000000000000000000000000
--- a/CMake/cdat_modules/zmq_pkg.cmake
+++ /dev/null
@@ -1,10 +0,0 @@
-set(ZMQ_MAJOR 4)
-set(ZMQ_MINOR 0)
-set(ZMQ_PATCH 4)
-set(ZMQ_VERSION ${ZMQ_MAJOR}.${ZMQ_MINOR}.${ZMQ_PATCH})
-set(ZMQ_URL ${LLNL_URL})
-set(ZMQ_GZ zeromq-${ZMQ_VERSION}.tar.gz)
-set(ZMQ_MD5 f3c3defbb5ef6cc000ca65e529fdab3b)
-set(ZMQ_SOURCE ${ZMQ_URL}/${ZMQ_GZ})
-
-add_cdat_package(ZMQ "" "" OFF)
diff --git a/CMake/cdat_modules_extra/CLAPACK_install_step.cmake.in b/CMake/cdat_modules_extra/CLAPACK_install_step.cmake.in
deleted file mode 100644
index cd9a7e5394f898128360559ffd26829c07f47080..0000000000000000000000000000000000000000
--- a/CMake/cdat_modules_extra/CLAPACK_install_step.cmake.in
+++ /dev/null
@@ -1,38 +0,0 @@
-
-if(WIN32)
-
-  EXECUTE_PROCESS(
-    COMMAND ${CMAKE_COMMAND} -E copy_if_different "@clapack_binary@/SRC/Release/lapack@_LINK_LIBRARY_SUFFIX@" "@clapack_install@/lib/"
-    )
-
-  EXECUTE_PROCESS(
-    COMMAND ${CMAKE_COMMAND} -E copy_if_different "@clapack_binary@/BLAS/SRC/Release/blas@_LINK_LIBRARY_SUFFIX@" "@clapack_install@/lib/"
-    )
-
-  EXECUTE_PROCESS(
-    COMMAND ${CMAKE_COMMAND} -E copy_if_different "@clapack_binary@/F2CLIBS/libf2c/Release/libf2c@_LINK_LIBRARY_SUFFIX@" "@clapack_install@/lib/"
-    )
-
-  EXECUTE_PROCESS(
-    COMMAND ${CMAKE_COMMAND} -E copy_directory "@clapack_source@/include" "@clapack_install@/include"
-    )
-
-else()
-
-  EXECUTE_PROCESS(
-    COMMAND ${CMAKE_COMMAND} -E copy_if_different "@clapack_binary@/SRC/liblapack@_LINK_LIBRARY_SUFFIX@" "@clapack_install@/lib/"
-    )
-
-  EXECUTE_PROCESS(
-    COMMAND ${CMAKE_COMMAND} -E copy_if_different "@clapack_binary@/BLAS/SRC/libblas@_LINK_LIBRARY_SUFFIX@" "@clapack_install@/lib/"
-    )
-
-  EXECUTE_PROCESS(
-    COMMAND ${CMAKE_COMMAND} -E copy_if_different "@clapack_binary@/F2CLIBS/libf2c/libf2c@_LINK_LIBRARY_SUFFIX@" "@clapack_install@/lib/"
-    )
-
-  EXECUTE_PROCESS(
-    COMMAND ${CMAKE_COMMAND} -E copy_directory "@clapack_source@/include" "@clapack_install@/include"
-    )
-
-endif()
diff --git a/CMake/cdat_modules_extra/ESMF_install_step.cmake.in b/CMake/cdat_modules_extra/ESMF_install_step.cmake.in
deleted file mode 100644
index 8e754914ffc278e0ac443fba4395c2dcaed50111..0000000000000000000000000000000000000000
--- a/CMake/cdat_modules_extra/ESMF_install_step.cmake.in
+++ /dev/null
@@ -1,35 +0,0 @@
-
-include(@cdat_CMAKE_BINARY_DIR@/cdat_common_environment.cmake)
-
-set(ENV{ESMF_DIR} @ESMF_source@/esmf)
-set(ENV{ESMF_INSTALL_PREFIX} @ESMF_install@)
-set(ENV{ESMF_PTHREADS} @ESMF_pthreads@)
-set(ENV{ESMF_OS} @ESMF_os@)
-set(ENV{ESMF_COMPILER} @ESMF_compiler@)
-set(ENV{ESMF_COMM} @ESMF_comm@)
-set(ENV{ESMF_ABI} @ESMF_abi@)
-set(ENV{ESMF_OPENMP} @ESMF_openmp@)
-set(ENV{ESMF_MOAB} OFF)
-set(ENV{ESMF_ARRAYLITE} TRUE)
-set(ENV{CFLAGS} ${cdat_osx_flags_fortran})
-set(ENV{CXXFLAGS} ${cdat_osx_cxxflags_fortran})
-set(ENV{CPPFLAGS} ${cdat_osx_flags_fortran})
-set(ENV{FFLAGS} ${cdat_osx_flags_fortran})
-
-execute_process(
-  COMMAND make install
-  WORKING_DIRECTORY @ESMF_source@/esmf
-  OUTPUT_VARIABLE CDAT_OUT
-  ERROR_VARIABLE CDAT_ERR
-  RESULT_VARIABLE res)
-
-set(ESMF_source @ESMF_source@)
-set(ESMF_install @ESMF_install@)
-set(ESMF_COMM @mpiuni@)
-set(ESMF_pthreads @ESMF_pthreads@)
-
-if(NOT ${res} EQUAL 0)
-  message("Install Errors detected: \n${CDAT_OUT}\n${CDAT_ERR}")
-  message(FATAL_ERROR "Error in Install")
-endif()
-message("Install succeeded.")
diff --git a/CMake/cdat_modules_extra/ESMF_make_step.cmake.in b/CMake/cdat_modules_extra/ESMF_make_step.cmake.in
deleted file mode 100644
index 2240671640b200cb23e00fe4e0f9b1c209278758..0000000000000000000000000000000000000000
--- a/CMake/cdat_modules_extra/ESMF_make_step.cmake.in
+++ /dev/null
@@ -1,45 +0,0 @@
-
-include(@cdat_CMAKE_BINARY_DIR@/cdat_common_environment.cmake)
-
-set(ENV{ESMF_DIR} @ESMF_source@/esmf)
-set(ENV{ESMF_INSTALL_PREFIX} @ESMF_install@)
-set(ENV{ESMF_PTHREADS} @ESMF_pthreads@)
-set(ENV{ESMF_OS} @ESMF_os@)
-set(ENV{ESMF_COMPILER} @ESMF_compiler@)
-set(ENV{ESMF_COMM} @ESMF_comm@)
-set(ENV{ESMF_ABI} @ESMF_abi@)
-set(ENV{ESMF_OPENMP} @ESMF_openmp@)
-set(ENV{ESMF_MOAB} OFF)
-set(ENV{ESMF_ARRAYLITE} TRUE)
-set(ENV{CFLAGS} ${cdat_osx_flags_fortran})
-set(ENV{CXXFLAGS} ${cdat_osx_cxxflags_fortran})
-set(ENV{CPPFLAGS} ${cdat_osx_flags_fortran})
-set(ENV{FFLAGS} ${cdat_osx_flags_fortran})
-
-
-
-## Store the configuration used to build ESMF
-set(outfile @ESMF_source@/set_esmf_env_ser.sh)
-file(WRITE ${outfile} "# ESMF compiled with these environment variables\n\n")
-file(APPEND ${outfile} "export ESMF_DIR=@ESMF_source@/esmf\n")
-file(APPEND ${outfile} "export ESMF_INSTALL_PREFIX=@ESMF_install@\n")
-file(APPEND ${outfile} "export ESMF_THREADS=@ESMF_pthreads@\n")
-file(APPEND ${outfile} "export ESMF_COMM=@ESMF_COMM@\n")
-file(APPEND ${outfile} "\n")
-file(APPEND ${outfile} "# Full information regarding the install is found in:\n")
-file(GLOB_RECURSE ESMF_mkfile "@ESMF_install@/lib/libO/e*.mk")
-file(APPEND ${outfile} "# "${ESMF_mkfile}"\n")
-
-# make should be detected by CMAKE at some point
-execute_process(
-  COMMAND make 
-  WORKING_DIRECTORY @ESMF_source@/esmf
-  OUTPUT_VARIABLE CDAT_OUT
-  ERROR_VARIABLE CDAT_ERR
-  RESULT_VARIABLE res)
-
-if(NOT ${res} EQUAL 0)
-  message("Make Errors detected: \n${CDAT_OUT}\n${CDAT_ERR}")
-  message(FATAL_ERROR "Error in Make:\n" ${res})
-endif()
-message("Make succeeded.")
diff --git a/CMake/cdat_modules_extra/ESMP_install_step.cmake.in b/CMake/cdat_modules_extra/ESMP_install_step.cmake.in
deleted file mode 100644
index 3d5d01f42a0ef03caf7083e144f419007dc4d580..0000000000000000000000000000000000000000
--- a/CMake/cdat_modules_extra/ESMP_install_step.cmake.in
+++ /dev/null
@@ -1,34 +0,0 @@
-
-include(@cdat_CMAKE_BINARY_DIR@/cdat_common_environment.cmake)
-
-file(GLOB_RECURSE ESMP_esmfmkfile "@cdat_EXTERNALS@/lib/libO/*.mk")
-
-set(ENV{ESMFMKFILE} ${ESMP_esmfmkfile})
-foreach( item ${ESMP_esmfmkfile})
-  message("item " ${item})
-endforeach( item ${ESMP_esmfmkfile})
-
-execute_process(
-  COMMAND "@PYTHON_EXECUTABLE@" generateESMP_Config.py
-  WORKING_DIRECTORY @ESMP_source@
-  OUTPUT_VARIABLE CDAT_OUT
-  ERROR_VARIABLE CDAT_ERR
-  RESULT_VARIABLE res)
-
-if(NOT ${res} EQUAL 0)
-  message("Install Errors detected: \n${CDAT_OUT}\n${CDAT_ERR}")
-  message(FATAL_ERROR "Error in Install")
-endif()
-
-if(NOT EXISTS @ESMP_source@/src/ESMP_Config.py)
-  message(FATAL_ERROR "ESMP_Config.py not created")
-endif()
-
-file(INSTALL @ESMP_source@ DESTINATION @PYTHON_SITE_PACKAGES@)
-
-if(NOT EXISTS @PYTHON_SITE_PACKAGES@/ESMP/src/ESMP_Config.py)
-  message(FATAL_ERROR "Install process failed")
-endif()
-
-
-message("Install succeeded.")
diff --git a/CMake/cdat_modules_extra/ESMP_patch_step.cmake.in b/CMake/cdat_modules_extra/ESMP_patch_step.cmake.in
deleted file mode 100644
index 823fcdb8a6590e97e5360561620f7519e56bc916..0000000000000000000000000000000000000000
--- a/CMake/cdat_modules_extra/ESMP_patch_step.cmake.in
+++ /dev/null
@@ -1,9 +0,0 @@
-# Patch ESMP_LoadESMF.py to allow relative loading of config file
-if(NOT WIN32)
-  execute_process(
-    WORKING_DIRECTORY @ESMP_source@/src
-    COMMAND patch -p1 
-    INPUT_FILE @cdat_CMAKE_SOURCE_DIR@/ESMP.patch
-  )
-endif()
-
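The ESMP patch step relies on execute_process's INPUT_FILE, which feeds the diff to patch on stdin and so avoids any shell redirection. A minimal sketch, with the source path assumed:

    execute_process(
      COMMAND patch -p1
      WORKING_DIRECTORY /path/to/ESMP/src    # illustrative path
      INPUT_FILE ${CMAKE_CURRENT_SOURCE_DIR}/ESMP.patch
      RESULT_VARIABLE _res)
    if(NOT _res EQUAL 0)
      message(FATAL_ERROR "patching ESMP failed")
    endif()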
diff --git a/CMake/cdat_modules_extra/NUMPY_configure_step.cmake.in b/CMake/cdat_modules_extra/NUMPY_configure_step.cmake.in
deleted file mode 100644
index cee8497ed350f0d6370c5bf7fd9a4f7c3cc1ada5..0000000000000000000000000000000000000000
--- a/CMake/cdat_modules_extra/NUMPY_configure_step.cmake.in
+++ /dev/null
@@ -1,42 +0,0 @@
-message("Configuring NUMPY:\n@NUMPY_binary@")
-set(ENV{@LIBRARY_PATH@} "@CMAKE_INSTALL_PREFIX@/lib:@cdat_EXTERNALS@/lib:$ENV{@LIBRARY_PATH@}")
-set(ENV{VS_UNICODE_OUTPUT} "")
-
-# As explained in site.cfg.example - See http://projects.scipy.org/numpy/browser/trunk/site.cfg.example
-# Directories listed in variable such as library_dirs or source_dirs should be separated using os.pathsep
-# On windows, the separator is ";" and ":" on unix-like platform
-set(path_sep ":")
-if(WIN32)
-  set(path_sep ";")
-endif()
-
-# As explained in site.cfg.example, the library name without the prefix "lib" should be used.
-# Nevertheless, on windows, only "libf2c" leads to a successful configuration and
-# installation of NUMPY
-set(f2c_libname "f2c")
-if(WIN32)
-  set(f2c_libname "libf2c")
-endif()
-
-# setup the site.cfg file
-file(WRITE "@NUMPY_binary@/site.cfg"
-"
-[blas]
-library_dirs = @cdat_EXTERNALS@/lib${path_sep}@cdat_EXTERNALS@/lib
-libraries = blas,${f2c_libname}
-
-[lapack]
-library_dirs = @cdat_EXTERNALS@/lib
-lapack_libs = lapack
-")
-
-execute_process(
-  COMMAND "@PYTHON_EXECUTABLE@" setup.py config
-  WORKING_DIRECTORY "@NUMPY_binary@"
-  RESULT_VARIABLE res
-  )
-
-if(NOT ${res} EQUAL 0)
-  message(FATAL_ERROR "Error in config of NUMPY")
-endif()
-message("Numpy config worked.")
diff --git a/CMake/cdat_modules_extra/NUMPY_install_step.cmake.in b/CMake/cdat_modules_extra/NUMPY_install_step.cmake.in
deleted file mode 100644
index feac845d50759d98b2bbcdaccc9f24ba98523aae..0000000000000000000000000000000000000000
--- a/CMake/cdat_modules_extra/NUMPY_install_step.cmake.in
+++ /dev/null
@@ -1,19 +0,0 @@
-message("Installing NUMPY:\n@NUMPY_PREFIX_ARGS@")
-set(ENV{@LIBRARY_PATH@} "@CMAKE_INSTALL_PREFIX@/lib:@cdat_EXTERNALS@/lib:$ENV{@LIBRARY_PATH@}")
-set(ENV{VS_UNICODE_OUTPUT} "")
-
-if(APPLE)
-    set(ENV{CFLAGS} "@cdat_osx_arch_flag@ @cdat_osx_version_flag@ @cdat_osx_sysroot@")
-endif()
-
-execute_process(
-  COMMAND "@PYTHON_EXECUTABLE@" setup.py install @PYTHON_EXTRA_PREFIX@
-  WORKING_DIRECTORY "@NUMPY_binary@"
-  RESULT_VARIABLE res
-)
-
-if(NOT ${res} EQUAL 0)
-  message("NUMPY Errors detected: \n${NUMPY_OUT}\n${NUMPY_ERR}")
-  message(FATAL_ERROR "Error in config of NUMPY")
-endif()
-message("Numpy install succeeded.")
diff --git a/CMake/cdat_modules_extra/NUMPY_make_step.cmake.in b/CMake/cdat_modules_extra/NUMPY_make_step.cmake.in
deleted file mode 100644
index 94b92ebd2eacac82ca23d764f050f553616a167b..0000000000000000000000000000000000000000
--- a/CMake/cdat_modules_extra/NUMPY_make_step.cmake.in
+++ /dev/null
@@ -1,21 +0,0 @@
-set(ENV{@LIBRARY_PATH@} "@CMAKE_INSTALL_PREFIX@/lib:@cdat_EXTERNALS@/lib:$ENV{@LIBRARY_PATH@}")
-set(ENV{VS_UNICODE_OUTPUT} "")
-
-if(APPLE)
-    set(ENV{CFLAGS} "@cdat_osx_arch_flag@ @cdat_osx_version_flag@ @cdat_osx_sysroot@")
-    set(ENV{LDFLAGS} "$ENV{LDFLAGS}")
-else()
-    set(ENV{LDFLAGS} "$ENV{LDFLAGS} -shared")
-endif()
-
-execute_process(
-  COMMAND "@PYTHON_EXECUTABLE@" setup.py build
-  WORKING_DIRECTORY "@NUMPY_binary@"
-  RESULT_VARIABLE res
-  OUTPUT_VARIABLE NUMPY_OUT
-  ERROR_VARIABLE NUMPY_ERR)
-
-if(NOT ${res} EQUAL 0)
-  message("[ERROR] NUMPY Errors detected: \n${NUMPY_OUT}\n${NUMPY_ERR}")
-  message(FATAL_ERROR "[ERROR] Error in config of NUMPY")
-endif()
diff --git a/CMake/cdat_modules_extra/PYLIBXML2_install_step.cmake.in b/CMake/cdat_modules_extra/PYLIBXML2_install_step.cmake.in
deleted file mode 100644
index 4a6e82762110e82c2f2fb44c47f7a87e3ff48f91..0000000000000000000000000000000000000000
--- a/CMake/cdat_modules_extra/PYLIBXML2_install_step.cmake.in
+++ /dev/null
@@ -1,21 +0,0 @@
-message("Installing PYLIBXML2:\n@PYLIBXML2_PREFIX_ARGS@")
-include(@cdat_CMAKE_BINARY_DIR@/cdat_common_environment.cmake)
-set(ENV{@LIBRARY_PATH@} "@CMAKE_INSTALL_PREFIX@/lib:@cdat_EXTERNALS@/lib:$ENV{@LIBRARY_PATH@}")
-
-if(APPLE)
-    set(ENV{CFLAGS} "@cdat_osx_arch_flag@ @cdat_osx_version_flag@ @cdat_osx_sysroot@")
-endif()
-
-execute_process(
-  COMMAND env  @LIBRARY_PATH@="$ENV{LD_LIBRARY_PATH}" "@PYTHON_EXECUTABLE@" setup.py install @PYTHON_EXTRA_PREFIX@
-  WORKING_DIRECTORY "@PYLIBXML2_binary@/python"
-  RESULT_VARIABLE res
-  OUTPUT_VARIABLE PYLIBXML2_OUT
-  ERROR_VARIABLE PYLIBXML2_ERR
-)
-
-if(NOT ${res} EQUAL 0)
-  message("libxml2-python bindings Errors detected: \n${PYLIBXML2_OUT}\n${PYLIBXML2_ERR}")
-  message(FATAL_ERROR "Error in config of PYLIBXML2")
-endif()
-message("libxml2-python bindings install succeeded.")
diff --git a/CMake/cdat_modules_extra/PYLIBXML2_make_step.cmake.in b/CMake/cdat_modules_extra/PYLIBXML2_make_step.cmake.in
deleted file mode 100644
index 562cb24ff1397ae746a7bb4f3abffdf6d2b8c0fa..0000000000000000000000000000000000000000
--- a/CMake/cdat_modules_extra/PYLIBXML2_make_step.cmake.in
+++ /dev/null
@@ -1,24 +0,0 @@
-message("Building libxml2 python bindings:\n@PYLIBXML2_binary@")
-include(@cdat_CMAKE_BINARY_DIR@/cdat_common_environment.cmake)
-set(ENV{@LIBRARY_PATH@} "@CMAKE_INSTALL_PREFIX@/lib:@cdat_EXTERNALS@/lib:$ENV{@LIBRARY_PATH@}")
-set(ENV{VS_UNICODE_OUTPUT} "")
-if(APPLE)
-    set(ENV{CFLAGS} "@cdat_osx_arch_flag@ @cdat_osx_version_flag@ @cdat_osx_sysroot@")
-endif()
-
-set(cdat_EXTERNALS @cdat_EXTERNALS@)
-configure_file(@cdat_CMAKE_SOURCE_DIR@/cdat_modules_extra/PYLIBXML2_setup.py.in
-  @cdat_BINARY_DIR@/build/PYLIBXML2/python/setup.py)
-
-execute_process(
-  COMMAND env  @LIBRARY_PATH@="$ENV{LD_LIBRARY_PATH}" "@PYTHON_EXECUTABLE@" setup.py build
-  WORKING_DIRECTORY "@PYLIBXML2_binary@/python"
-  RESULT_VARIABLE res
-  OUTPUT_VARIABLE PYLIBXML2_OUT
-  ERROR_VARIABLE PYLIBXML2_ERR)
-
-if(NOT ${res} EQUAL 0)
-  message("libxml2-python bindings Errors detected: \n${PYLIBXML2_OUT}\n${PYLIBXML2_ERR}")
-  message(FATAL_ERROR "Error in config of PYLIBXML2")
-endif()
-message("libxml2_python bindings build worked.")
diff --git a/CMake/cdat_modules_extra/PYLIBXML2_setup.py.in b/CMake/cdat_modules_extra/PYLIBXML2_setup.py.in
deleted file mode 100755
index 22fdbfb9bebdca890b82bfffe4d300f6b00edad6..0000000000000000000000000000000000000000
--- a/CMake/cdat_modules_extra/PYLIBXML2_setup.py.in
+++ /dev/null
@@ -1,243 +0,0 @@
-#!/usr/bin/python -u
-#
-# Setup script for libxml2 and libxslt if found
-#
-import sys, os
-from distutils.core import setup, Extension
-
-# Below ROOT, we expect to find include, include/libxml2, lib and bin.
-# On *nix, it is not needed (but should not harm), 
-# on Windows, it is set by configure.js.
-ROOT = r'/usr' 
-
-# Thread-enabled libxml2
-with_threads = 1
-
-# If this flag is set (windows only), 
-# a private copy of the dlls are included in the package.
-# If this flag is not set, the libxml2 and libxslt
-# dlls must be found somewhere in the PATH at runtime.
-WITHDLLS = 1 and sys.platform.startswith('win')
-
-def missing(file):
-    if os.access(file, os.R_OK) == 0:
-        return 1
-    return 0
-
-try:
-    HOME = os.environ['HOME']
-except:
-    HOME="C:"
-
-if WITHDLLS:
-    # libxml dlls (expected in ROOT/bin)
-    dlls = [ 'iconv.dll','libxml2.dll','libxslt.dll','libexslt.dll' ]
-    dlls = map(lambda dll: os.path.join(ROOT,'bin',dll),dlls)
-
-    # create __init__.py for the libxmlmods package
-    if not os.path.exists("libxmlmods"):
-        os.mkdir("libxmlmods")
-        open("libxmlmods/__init__.py","w").close()
-
-    def altImport(s):
-        s = s.replace("import libxml2mod","from libxmlmods import libxml2mod")
-        s = s.replace("import libxsltmod","from libxmlmods import libxsltmod")
-        return s
-
-if sys.platform.startswith('win'):
-    libraryPrefix = 'lib'
-    platformLibs = []
-else:
-    libraryPrefix = ''
-    platformLibs = ["m","z"]
-
-# those are examined to find 
-# - libxml2/libxml/tree.h
-# - iconv.h 
-# - libxslt/xsltconfig.h
-includes_dir = [
-"/usr/include",
-"/usr/local/include",
-"/opt/include",
-os.path.join(ROOT,'include'),
-HOME,
-"@cdat_EXTERNALS@/include"
-];
-
-xml_includes=""
-for dir in includes_dir:
-    if not missing(dir + "/libxml2/libxml/tree.h"):
-        xml_includes=dir + "/libxml2"
-	break;
-
-if xml_includes == "":
-    print "failed to find headers for libxml2: update includes_dir"
-    sys.exit(1)
-
-iconv_includes=""
-for dir in includes_dir:
-    if not missing(dir + "/iconv.h"):
-        iconv_includes=dir
-	break;
-
-if iconv_includes == "":
-    print "failed to find headers for libiconv: update includes_dir"
-    sys.exit(1)
-
-# those are added in the linker search path for libraries
-libdirs = [
-os.path.join(ROOT,'lib'),
-]
-
-xml_files = ["libxml2-api.xml", "libxml2-python-api.xml",
-             "libxml.c", "libxml.py", "libxml_wrap.h", "types.c",
-	     "xmlgenerator.py", "README", "TODO", "drv_libxml2.py"]
-
-xslt_files = ["libxslt-api.xml", "libxslt-python-api.xml",
-             "libxslt.c", "libxsl.py", "libxslt_wrap.h",
-	     "xsltgenerator.py"]
-
-if missing("libxml2-py.c") or missing("libxml2.py"):
-    try:
-	try:
-	    import xmlgenerator
-	except:
-	    import generator
-    except:
-	print "failed to find and generate stubs for libxml2, aborting ..."
-	print sys.exc_type, sys.exc_value
-	sys.exit(1)
-
-    head = open("libxml.py", "r")
-    generated = open("libxml2class.py", "r")
-    result = open("libxml2.py", "w")
-    for line in head.readlines():
-        if WITHDLLS:
-            result.write(altImport(line))
-        else:
-            result.write(line)
-    for line in generated.readlines():
-	result.write(line)
-    head.close()
-    generated.close()
-    result.close()
-
-with_xslt=0
-if missing("libxslt-py.c") or missing("libxslt.py"):
-    if missing("xsltgenerator.py") or missing("libxslt-api.xml"):
-        print "libxslt stub generator not found, libxslt not built"
-    else:
-	try:
-	    import xsltgenerator
-	except:
-	    print "failed to generate stubs for libxslt, aborting ..."
-	    print sys.exc_type, sys.exc_value
-	else:
-	    head = open("libxsl.py", "r")
-	    generated = open("libxsltclass.py", "r")
-	    result = open("libxslt.py", "w")
-	    for line in head.readlines():
-                if WITHDLLS:
-                    result.write(altImport(line))
-                else:
-                    result.write(line)
-	    for line in generated.readlines():
-		result.write(line)
-	    head.close()
-	    generated.close()
-	    result.close()
-	    with_xslt=1
-else:
-    with_xslt=1
-
-if with_xslt == 1:
-    xslt_includes=""
-    for dir in includes_dir:
-	if not missing(dir + "/libxslt/xsltconfig.h"):
-	    xslt_includes=dir + "/libxslt"
-	    break;
-
-    if xslt_includes == "":
-	print "failed to find headers for libxslt: update includes_dir"
-	with_xslt = 0
-
-
-descr = "libxml2 package"
-modules = [ 'libxml2', 'drv_libxml2' ]
-if WITHDLLS:
-    modules.append('libxmlmods.__init__')
-c_files = ['libxml2-py.c', 'libxml.c', 'types.c' ]
-includes= [xml_includes, iconv_includes]
-libs    = [libraryPrefix + "xml2"] + platformLibs
-macros  = []
-if with_threads:
-    macros.append(('_REENTRANT','1'))
-if with_xslt == 1:
-    descr = "libxml2 and libxslt package"
-    if not sys.platform.startswith('win'):
-        #
-        # We are gonna build 2 identical shared libs with merge initializing
-        # both libxml2mod and libxsltmod
-        #
-        c_files = c_files + ['libxslt-py.c', 'libxslt.c']
-        xslt_c_files = c_files
-        macros.append(('MERGED_MODULES', '1'))
-    else:
-        #
-        # On windows the MERGED_MODULE option is not needed
-        # (and does not work)
-        #
-        xslt_c_files = ['libxslt-py.c', 'libxslt.c', 'types.c']
-    libs.insert(0, libraryPrefix + 'exslt')
-    libs.insert(0, libraryPrefix + 'xslt')
-    includes.append(xslt_includes)
-    modules.append('libxslt')
-
-
-extens=[Extension('libxml2mod', c_files, include_dirs=includes,
-                  library_dirs=libdirs, 
-                  libraries=libs, define_macros=macros)] 
-if with_xslt == 1:
-    extens.append(Extension('libxsltmod', xslt_c_files, include_dirs=includes,
-			    library_dirs=libdirs, 
-                            libraries=libs, define_macros=macros))
-
-if missing("MANIFEST"):
-
-    manifest = open("MANIFEST", "w")
-    manifest.write("setup.py\n")
-    for file in xml_files:
-        manifest.write(file + "\n")
-    if with_xslt == 1:
-	for file in xslt_files:
-	    manifest.write(file + "\n")
-    manifest.close()
-
-if WITHDLLS:
-    ext_package = "libxmlmods"
-    if sys.version >= "2.2":
-        base = "lib/site-packages/"
-    else:
-        base = ""
-    data_files = [(base+"libxmlmods",dlls)]
-else:
-    ext_package = None
-    data_files = []
-
-setup (name = "libxml2-python",
-       # On *nix, the version number is created from setup.py.in
-       # On windows, it is set by configure.js
-       version = "2.7.8",
-       description = descr,
-       author = "Daniel Veillard",
-       author_email = "veillard@redhat.com",
-       url = "http://xmlsoft.org/python.html",
-       licence="MIT Licence",
-       py_modules=modules,
-       ext_modules=extens,
-       ext_package=ext_package,
-       data_files=data_files,
-       )
-
-sys.exit(0)
-
diff --git a/CMake/cdat_modules_extra/SCIPY_configure_step.cmake.in b/CMake/cdat_modules_extra/SCIPY_configure_step.cmake.in
deleted file mode 100644
index 00d381497258c553fd41f30c964070f5d707e6c5..0000000000000000000000000000000000000000
--- a/CMake/cdat_modules_extra/SCIPY_configure_step.cmake.in
+++ /dev/null
@@ -1,62 +0,0 @@
-message("Configuring SCIPY:\n@SCIPY_binary@")
-set(ENV{@LIBRARY_PATH@} "@CMAKE_INSTALL_PREFIX@/lib:@cdat_EXTERNALS@/lib:$ENV{@LIBRARY_PATH@}")
-set(ENV{VS_UNICODE_OUTPUT} "")
-set(ENV{PYTHONPATH} "@PYTHONPATH@")
-set(ENV{PATH} "@SB_BIN_DIR@:$ENV{PATH}")
-
-# As explained in site.cfg.example - See http://projects.scipy.org/scipy/browser/trunk/site.cfg.example
-# Directories listed in variable such as library_dirs or source_dirs should be separated using os.pathsep
-# On windows, the separator is ";" and ":" on unix-like platform
-set(path_sep ":")
-if(WIN32)
-  set(path_sep ";")
-endif()
-
-# As explained in site.cfg.example, the library name without the prefix "lib" should be used.
-# Nevertheless, on windows, only "libf2c" leads to a successful configuration and
-# installation of SCIPY
-set(f2c_libname "f2c")
-if(WIN32)
-  set(f2c_libname "libf2c")
-endif()
-
-set(_blas_dirs)
-set(_lapack_dirs)
-
-if (CDAT_USE_SYSTEM_LAPACK)
-  foreach(_path ${BLAS_LIBRARIES})
-    get_filename_component(_dir ${_path} PATH)
-    list(APPEND _blas_dirs ${_dir})
-  endforeach()
-
-  foreach(_path ${LAPACK_LIBRARIES})
-    get_filename_component(_dir ${_path} PATH)
-    list(APPEND _lapack_dirs ${_dir})
-  endforeach()
-else()
-  set(_blas_dirs @cdat_EXTERNALS@/lib)
-  set(_lapack_dirs @cdat_EXTERNALS@/lib)
-endif()
-
-# setup the site.cfg file
-file(WRITE "@SCIPY_binary@/site.cfg"
-"
-[blas]
-library_dirs = ${_blas_dirs}
-libraries = blas,${f2c_libname}
-
-[lapack]
-library_dirs = ${_lapack_dirs}
-lapack_libs = lapack
-")
-
-execute_process(
-  COMMAND "@PYTHON_EXECUTABLE@" setup.py config
-  WORKING_DIRECTORY "@SCIPY_binary@"
-  RESULT_VARIABLE res
-  )
-
-if(NOT ${res} EQUAL 0)
-  message(FATAL_ERROR "Error in config of SCIPY")
-endif()
-message("Scipy config worked.")
diff --git a/CMake/cdat_modules_extra/SCIPY_install_step.cmake.in b/CMake/cdat_modules_extra/SCIPY_install_step.cmake.in
deleted file mode 100644
index 34a3e9edaeeb9dc4deb3b1d357395b44d5daca56..0000000000000000000000000000000000000000
--- a/CMake/cdat_modules_extra/SCIPY_install_step.cmake.in
+++ /dev/null
@@ -1,21 +0,0 @@
-message("Installing SCIPY:\n@SCIPY_PREFIX_ARGS@")
-set(ENV{@LIBRARY_PATH@} "@CMAKE_INSTALL_PREFIX@/lib:@cdat_EXTERNALS@/lib:$ENV{@LIBRARY_PATH@}")
-set(ENV{VS_UNICODE_OUTPUT} "")
-set(ENV{PYTHONPATH} "@PYTHONPATH@")
-set(ENV{PATH} "@SB_BIN_DIR@:$ENV{PATH}")
-
-if(APPLE)
-    set(ENV{CFLAGS} "@cdat_osx_arch_flag@ @cdat_osx_version_flag@ @cdat_osx_sysroot@")
-endif()
-
-execute_process(
-  COMMAND "@PYTHON_EXECUTABLE@" setup.py install @PYTHON_EXTRA_PREFIX@
-  WORKING_DIRECTORY "@SCIPY_binary@"
-  RESULT_VARIABLE res
-)
-
-if(NOT ${res} EQUAL 0)
-  message("SCIPY Errors detected: \n${SCIPY_OUT}\n${SCIPY_ERR}")
-  message(FATAL_ERROR "Error in config of SCIPY")
-endif()
-message("Scipy install succeeded.")
diff --git a/CMake/cdat_modules_extra/SCIPY_make_step.cmake.in b/CMake/cdat_modules_extra/SCIPY_make_step.cmake.in
deleted file mode 100644
index c8d533cb18a385efc79d8c8bf78dfba3f7e0c012..0000000000000000000000000000000000000000
--- a/CMake/cdat_modules_extra/SCIPY_make_step.cmake.in
+++ /dev/null
@@ -1,22 +0,0 @@
-message("Building SCIPY:\n@SCIPY_binary@")
-set(ENV{@LIBRARY_PATH@} "@CMAKE_INSTALL_PREFIX@/lib:@cdat_EXTERNALS@/lib:$ENV{@LIBRARY_PATH@}")
-set(ENV{VS_UNICODE_OUTPUT} "")
-set(ENV{PYTHONPATH} "@PYTHONPATH@")
-set(ENV{PATH} "@SB_BIN_DIR@:$ENV{PATH}")
-
-if(APPLE)
-    set(ENV{CFLAGS} "@cdat_osx_arch_flag@ @cdat_osx_version_flag@ @cdat_osx_sysroot@")
-endif()
-
-execute_process(
-  COMMAND "@PYTHON_EXECUTABLE@" setup.py build
-  WORKING_DIRECTORY "@SCIPY_binary@"
-  RESULT_VARIABLE res
-  OUTPUT_VARIABLE SCIPY_OUT
-  ERROR_VARIABLE SCIPY_ERR)
-
-if(NOT ${res} EQUAL 0)
-  message("SCIPY Errors detected: \n${SCIPY_OUT}\n${SCIPY_ERR}")
-  message(FATAL_ERROR "Error in config of SCIPY")
-endif()
-message("Scipy build worked.")
diff --git a/CMake/cdat_modules_extra/basemap_install_step.cmake.in b/CMake/cdat_modules_extra/basemap_install_step.cmake.in
deleted file mode 100644
index 95cb49de75aa2f495cb1c6a960a985a4a692cf55..0000000000000000000000000000000000000000
--- a/CMake/cdat_modules_extra/basemap_install_step.cmake.in
+++ /dev/null
@@ -1,20 +0,0 @@
-message("Installing basemap:\n@basemap_PREFIX_ARGS@")
-set(ENV{@LIBRARY_PATH@} "@CMAKE_INSTALL_PREFIX@/lib:@cdat_EXTERNALS@/lib:$ENV{@LIBRARY_PATH@}")
-set(ENV{VS_UNICODE_OUTPUT} "")
-set(ENV{GEOS_DIR} "@cdat_EXTERNALS@")
-
-if(APPLE)
-    set(ENV{CFLAGS} "@cdat_osx_arch_flag@ @cdat_osx_version_flag@ @cdat_osx_sysroot@")
-endif()
-
-execute_process(
-  COMMAND "@PYTHON_EXECUTABLE@" setup.py install @PYTHON_EXTRA_PREFIX@
-  WORKING_DIRECTORY "@basemap_binary@"
-  RESULT_VARIABLE res
-)
-
-if(NOT ${res} EQUAL 0)
-  message("basemap Errors detected: \n${basemap_OUT}\n${basemap_ERR}")
-  message(FATAL_ERROR "Error in config of basemap")
-endif()
-message("Numpy install succeeded.")
diff --git a/CMake/cdat_modules_extra/basemap_make_step.cmake.in b/CMake/cdat_modules_extra/basemap_make_step.cmake.in
deleted file mode 100644
index 0789e4ddab7cb7b64a3a67e956efff2f44fa195e..0000000000000000000000000000000000000000
--- a/CMake/cdat_modules_extra/basemap_make_step.cmake.in
+++ /dev/null
@@ -1,20 +0,0 @@
-message("Building basemap:\n@basemap_binary@")
-set(ENV{@LIBRARY_PATH@} "@CMAKE_INSTALL_PREFIX@/lib:@cdat_EXTERNALS@/lib:$ENV{@LIBRARY_PATH@}")
-set(ENV{VS_UNICODE_OUTPUT} "")
-set(ENV{GEOS_DIR} "@cdat_EXTERNALS@")
-if(APPLE)
-    set(ENV{CFLAGS} "@cdat_osx_arch_flag@ @cdat_osx_version_flag@ @cdat_osx_sysroot@")
-endif()
-
-execute_process(
-  COMMAND "@PYTHON_EXECUTABLE@" setup.py build
-  WORKING_DIRECTORY "@basemap_binary@"
-  RESULT_VARIABLE res
-  OUTPUT_VARIABLE basemap_OUT
-  ERROR_VARIABLE basemap_ERR)
-
-if(NOT ${res} EQUAL 0)
-  message("basemap Errors detected: \n${basemap_OUT}\n${basemap_ERR}")
-  message(FATAL_ERROR "Error in config of basemap")
-endif()
-message("basemap build worked.")
diff --git a/CMake/cdat_modules_extra/cdat.in b/CMake/cdat_modules_extra/cdat.in
deleted file mode 100755
index 7bfcf620b31461c629c765ebaddaa3bdf56f1ece..0000000000000000000000000000000000000000
--- a/CMake/cdat_modules_extra/cdat.in
+++ /dev/null
@@ -1,4 +0,0 @@
-#!/bin/bash
-# source is not portable whereas . is
-. "@CMAKE_INSTALL_PREFIX@/bin/setup_runtime.sh"
-python@PYVER@ "$@"
diff --git a/CMake/cdat_modules_extra/cdat_cmake_make_step.cmake.in b/CMake/cdat_modules_extra/cdat_cmake_make_step.cmake.in
deleted file mode 100644
index 5f5674fc187df2d1c984d8d62d572281c74202d9..0000000000000000000000000000000000000000
--- a/CMake/cdat_modules_extra/cdat_cmake_make_step.cmake.in
+++ /dev/null
@@ -1,18 +0,0 @@
-
-if(NOT APPLE)
-  include(@cdat_CMAKE_BINARY_DIR@/cdat_common_environment.cmake)
-endif()
-
-execute_process(
-  COMMAND make "${BUILD_ARGS}"
-  OUTPUT_VARIABLE CDAT_OUT
-  ERROR_VARIABLE  CDAT_ERR
-  WORKING_DIRECTORY "${WORKING_DIR}"
-  RESULT_VARIABLE res)
-
-if(NOT ${res} EQUAL 0)
-  message("Make Errors detected: \n${CDAT_OUT}\n${CDAT_ERR}")
-  message(FATAL_ERROR "Error in Make")
-endif()
-message("Make succeeded.")
-
diff --git a/CMake/cdat_modules_extra/cdat_common_environment.cmake.in b/CMake/cdat_modules_extra/cdat_common_environment.cmake.in
deleted file mode 100644
index 7a29f80050ff4f7b7a7e5e995984594bbc045409..0000000000000000000000000000000000000000
--- a/CMake/cdat_modules_extra/cdat_common_environment.cmake.in
+++ /dev/null
@@ -1,39 +0,0 @@
-message("[INFO] ADDITIONAL CFLAGS ${ADDITIONAL_CFLAGS}")
-set(ENV{PATH} "@SB_BIN_DIR@:@cdat_EXTERNALS@/bin:$ENV{PATH}")
-set(ENV{@LIBRARY_PATH@} "@CMAKE_INSTALL_PREFIX@/lib:@cdat_EXTERNALS@/lib:@cdat_EXTERNALS@/lib64:@cdat_EXTERNALS@/lib/paraview-@PARAVIEW_MAJOR@.@PARAVIEW_MINOR@:$ENV{@LIBRARY_PATH@}")
-if (NOT DEFINED SKIP_LDFLAGS)
- set(ENV{LDFLAGS} "-L@cdat_EXTERNALS@/lib -L@cdat_EXTERNALS@/lib64 @cdat_external_link_directories@ -Wl,-rpath,@cdat_EXTERNALS@/lib64 @cdat_rpath_flag@@CMAKE_INSTALL_PREFIX@/lib @cdat_rpath_flag@@cdat_EXTERNALS@/lib")
-endif()
-set(ENV{CFLAGS} "-I@cdat_EXTERNALS@/include -I@cdat_EXTERNALS@/lib/libffi-3.1/include @cdat_osx_flags@ @cdat_external_include_directories@ ${ADDITIONAL_CFLAGS}")
-set(ENV{CPPFLAGS} "-I@cdat_EXTERNALS@/include -I@cdat_EXTERNALS@/lib/libffi-3.1/include @cdat_osx_cppflags@ @cdat_external_include_directories@ ${ADDITIONAL_CPPFLAGS}")
-set(ENV{CXXFLAGS} "-I@cdat_EXTERNALS@/include -I@cdat_EXTERNALS@/lib/libffi-3.1/include @cdat_osx_cxxflags@ ${ADDITIONAL_CXXFLAGS}")
-set(ENV{PKG_CONFIG_PATH} "@cdat_EXTERNALS@/lib/pkgconfig:/usr/lib64/pkgconfig:/usr/lib/pkgconfig:/usr/lib/x86_64-linux-gnu/pkgconfig:/usr/share/pkgconfig:$ENV{PKG_CONFIG_PATH}")
-set(ENV{PKG_CONFIG} "@cdat_PKG_CONFIG_EXECUTABLE@")
-set(ENV{FC} "")
-set(ENV{FCFLAGS} "")
-set(ENV{FCLIBS} "")
-set(ENV{F77} "")
-set(ENV{FFLAGS} "")
-set(ENV{FLIBS} "")
-set(ENV{LD_X11} "") # for xgks
-set(ENV{PYTHONPATH} @PYTHONPATH@)
-set(ENV{CC} @CMAKE_C_COMPILER@)
-
-if(APPLE)
-  set(ENV{MACOSX_DEPLOYMENT_TARGET} "@CMAKE_OSX_DEPLOYMENT_TARGET@")
-endif()
-
-set(ENV{EXTERNALS} "@cdat_EXTERNALS@")
-
-set(PYTHONUSERBASE @PYTHON_SITE_PACKAGES_PREFIX@)
-#if ("@EGG_INSTALLER@" STREQUAL "PIP")
-#    # Set python userbase so that pip install packages locally
-#    set(PYTHONUSERBASE @CMAKE_INSTALL_PREFIX@)
-#    set(EGG_CMD env @LIBRARY_PATH@=$ENV{@LIBRARY_PATH@} PYTHONUSERBASE=${PYTHONUSERBASE} @PIP_BINARY@ install --user -v --download-cache @CDAT_PACKAGE_CACHE_DIR@ )
-#    if (NOT "${PIP_CERTIFICATE}" STREQUAL "")
-#        set(EGG_CMD ${EGG_CMD} --cert=${PIP_CERTIFICATE})
-#    endif()
-#else()
-#    set(EGG_CMD env @LD_LIBRARY_PATH@=$ENV{@LIBRARY_PATH@} @EASY_INSTALL_BINARY@ )
-#endif()
-
diff --git a/CMake/cdat_modules_extra/cdat_configure_step.cmake.in b/CMake/cdat_modules_extra/cdat_configure_step.cmake.in
deleted file mode 100644
index 32ecb43f0d69c093fd5c75191d4eb881d7145ed5..0000000000000000000000000000000000000000
--- a/CMake/cdat_modules_extra/cdat_configure_step.cmake.in
+++ /dev/null
@@ -1,30 +0,0 @@
-set(ENV{PKG_CONFIG_PATH} "@cdat_EXTERNALS@/lib/pkgconfig:/usr/lib/x86_64-linux-gnu/pkgconfig:/usr/lib64/pkgconfig:/usr/lib/pkgconfig:/usr/share/pkgconfig:$ENV{PKG_CONFIG_PATH}")
-set(ENV{PKG_CONFIG} "@cdat_PKG_CONFIG_EXECUTABLE@")
-
-if (BASH_CONFIGURE)
-	set(CONFIGURE_SHELL "bash")
-else()
-	set(CONFIGURE_SHELL "sh")
-endif()
-
-if (CONF_PATH_XTRA)
-  message("[INFO] configure is in subdirectory: ${CONF_PATH_XTRA}")
-else()
-  set(CONF_PATH_XTRA ".")
-endif()
-include(@cdat_CMAKE_BINARY_DIR@/cdat_common_environment.cmake)
-
-message("CONFIGURE_ARGS IS ${CONFIGURE_ARGS}")
-message("LD_ARGS IS $ENV{@LIBRARY_PATH@}")
-message("CFLAGS : $ENV{CFLAGS}")
-
-execute_process(
-  COMMAND env CC=$ENV{CC} CFLAGS=$ENV{CFLAGS} LD_LIBRARY_PATH=$ENV{@LIBRARY_PATH@} DYLD_FALLBACK_LIBRARY_PATH=$ENV{@LIBRARY_PATH@} @LIBRARY_PATH@=$ENV{@LIBRARY_PATH@} PKG_CONFIG=$ENV{PKG_CONFIG} PKG_CONFIG_PATH=$ENV{PKG_CONFIG_PATH} ${CONFIGURE_SHELL} ${CONF_PATH_XTRA}/configure --prefix=${INSTALL_DIR} ${CONFIGURE_ARGS}
-  WORKING_DIRECTORY "${WORKING_DIR}"
-  RESULT_VARIABLE res)
-
-if(NOT ${res} EQUAL 0)
-  message("Config Errors detected: \n${CDAT_OUT}\n${CDAT_ERR}")
-  message(FATAL_ERROR "Error in config")
-endif()
-message("Config succeeded.")
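The long env(1) command line above is how this step forwarded the prepared environment to configure. For comparison, CMake 3.1+ can do the same forwarding with its built-in launcher; a sketch under the same variable names (not a drop-in replacement):

    execute_process(
      COMMAND "${CMAKE_COMMAND}" -E env "CC=$ENV{CC}" "CFLAGS=$ENV{CFLAGS}"
              ${CONFIGURE_SHELL} ${CONF_PATH_XTRA}/configure
              --prefix=${INSTALL_DIR} ${CONFIGURE_ARGS}
      WORKING_DIRECTORY "${WORKING_DIR}"
      RESULT_VARIABLE res)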
diff --git a/CMake/cdat_modules_extra/cdat_download_sample_data.cmake.in b/CMake/cdat_modules_extra/cdat_download_sample_data.cmake.in
deleted file mode 100644
index f8cf8e0fa8200df7e5edeb7d1f40333576560245..0000000000000000000000000000000000000000
--- a/CMake/cdat_modules_extra/cdat_download_sample_data.cmake.in
+++ /dev/null
@@ -1,10 +0,0 @@
-file(READ @cdat_SOURCE_DIR@/Packages/dat/files.txt SAMPLE_FILES)
-string(REPLACE "\n" ";" SAMPLE_LIST ${SAMPLE_FILES})
-foreach(SAMPLE_FILE ${SAMPLE_LIST})
-  STRING(REPLACE "  " ";" DOWNLOAD_LIST ${SAMPLE_FILE})
-  LIST(GET DOWNLOAD_LIST 0 MD5)
-  LIST(GET DOWNLOAD_LIST 1 FILE_NM)
-  message("[INFO] Attempting to download http://uv-cdat.llnl.gov/cdat/sample_data/${FILE_NM} WITH MD5 ${MD5} to @CMAKE_INSTALL_PREFIX@/share/uvcdat/sample_data/${FILE_NM}")
-  file(DOWNLOAD http://uv-cdat.llnl.gov/cdat/sample_data/${FILE_NM} @CMAKE_INSTALL_PREFIX@/share/uvcdat/sample_data/${FILE_NM} EXPECTED_MD5=${MD5})
-endforeach()
-set(res 0)
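One caveat about the loop above: file(DOWNLOAD) reports failure through its STATUS list rather than through a process return code, so unconditionally setting res to 0 hides broken downloads. A sketch of a single verified download with the status actually checked, using the same names as the loop:

    file(DOWNLOAD
      http://uv-cdat.llnl.gov/cdat/sample_data/${FILE_NM}
      @CMAKE_INSTALL_PREFIX@/share/uvcdat/sample_data/${FILE_NM}
      EXPECTED_MD5 ${MD5}
      STATUS stat)
    list(GET stat 0 code)
    if(NOT code EQUAL 0)
      message(FATAL_ERROR "download of ${FILE_NM} failed: ${stat}")
    endif()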
diff --git a/CMake/cdat_modules_extra/cdat_install_step.cmake.in b/CMake/cdat_modules_extra/cdat_install_step.cmake.in
deleted file mode 100644
index 62fe3fa78c825646288a291c88adc99f5267d73a..0000000000000000000000000000000000000000
--- a/CMake/cdat_modules_extra/cdat_install_step.cmake.in
+++ /dev/null
@@ -1,13 +0,0 @@
-
-include(@cdat_CMAKE_BINARY_DIR@/cdat_common_environment.cmake)
-
-execute_process(
-  COMMAND make install ${INSTALL_ARGS}
-  WORKING_DIRECTORY "${WORKING_DIR}"
-  RESULT_VARIABLE res)
-
-if(NOT ${res} EQUAL 0)
-  message("Install Errors detected: \n${CDAT_OUT}\n${CDAT_ERR}")
-  message(FATAL_ERROR "Error in install")
-endif()
-message("Install succeeded.")
diff --git a/CMake/cdat_modules_extra/cdat_make_step.cmake.in b/CMake/cdat_modules_extra/cdat_make_step.cmake.in
deleted file mode 100644
index 59a4f113e2959421e49d961109b0cdddb884f00f..0000000000000000000000000000000000000000
--- a/CMake/cdat_modules_extra/cdat_make_step.cmake.in
+++ /dev/null
@@ -1,18 +0,0 @@
-
-include(@cdat_CMAKE_BINARY_DIR@/cdat_common_environment.cmake)
-
-if(NOT ${make})
-  set(make make)
-endif()
-execute_process(
-  COMMAND env CFLAGS=$ENV{CFLAGS} LDFLAGS=$ENV{LDFLAGS} CPPFLAGS=$ENV{CPPFLAGS} CXXFLAGS=$ENV{CXXFLAGS} ${make} -j ${BUILD_ARGS}
-  WORKING_DIRECTORY "${WORKING_DIR}"
-  OUTPUT_VARIABLE CDAT_OUT
-  ERROR_VARIABLE CDAT_ERR
-  RESULT_VARIABLE res)
-
-if(NOT ${res} EQUAL 0)
-  message("[ERROR] Make Errors detected: \n${CDAT_OUT}\n${CDAT_ERR}")
-  message(FATAL_ERROR "Error in Make")
-endif()
-
diff --git a/CMake/cdat_modules_extra/cdat_python_install_step.cmake.in b/CMake/cdat_modules_extra/cdat_python_install_step.cmake.in
deleted file mode 100644
index 87855421a030ed6c4b1f58a62d57614a7376bd51..0000000000000000000000000000000000000000
--- a/CMake/cdat_modules_extra/cdat_python_install_step.cmake.in
+++ /dev/null
@@ -1,30 +0,0 @@
-
-if(NOT APPLE)
-  set(ENV{@LIBRARY_PATH@} "@CMAKE_INSTALL_PREFIX@/lib:@cdat_EXTERNALS@/lib:$ENV{@LIBRARY_PATH@}")
-endif()
-
-set(ENV{CPPFLAGS} "-I@cdat_EXTERNALS@/include -I@cdat_EXTERNALS@/include/cairo @ADDITIONAL_CPPFLAGS@")
-set(ENV{CXXFLAGS} "-I@cdat_EXTERNALS@/include -I@cdat_EXTERNALS@/include/cairo @ADDITIONAL_CXXFLAGS@")
-set(ENV{CFLAGS} "-w -I@cdat_EXTERNALS@/include -I@cdat_EXTERNALS@/include/cairo @ADDITIONAL_CFLAGS@")
-set(ENV{LOCNCCONFIG} "@cdat_EXTERNALS@/bin/nc-config")
-set(ENV{EXTERNALS} "@cdat_EXTERNALS@")
-set(ENV{PKG_CONFIG_PATH} "@cdat_EXTERNALS@/lib/pkgconfig:/usr/lib64/pkgconfig:/usr/lib/pkgconfig:/usr/lib/x86_64-linux-gnu/pkgconfig:/usr/share/pkgconfig:$ENV{PKG_CONFIG_PATH}")
-set(ENV{PKG_CONFIG} "@cdat_PKG_CONFIG_EXECUTABLE@")
-
-message("Running cdat install with path: " $ENV{PATH})
-
-if(APPLE)
-  set(ENV{MACOSX_DEPLOYMENT_TARGET} "@CMAKE_OSX_DEPLOYMENT_TARGET@")
-endif()
-
-execute_process(
-    COMMAND  "@PYTHON_EXECUTABLE@" @cdat_CMAKE_SOURCE_DIR@/install.py @cdat_xtra_flags@ --enable-pp @SAMPLE_DATA@ @PYTHON_EXTRA_PREFIX@ @CDMS_ONLY@
-  WORKING_DIRECTORY "@WORKING_DIR@"
-  RESULT_VARIABLE res)
-
-if(NOT ${res} EQUAL 0)
-  message("Install Errors detected: \n${CDAT_OUT}\n${CDAT_ERR}")
-  message(FATAL_ERROR "Error in Python Install. ${res}")
-endif()
-message("Python install succeeded.")
-
diff --git a/CMake/cdat_modules_extra/cdat_python_step.cmake.in b/CMake/cdat_modules_extra/cdat_python_step.cmake.in
deleted file mode 100644
index cf32905a1ded2efdee5de6e1804a7ebd3c1f26dd..0000000000000000000000000000000000000000
--- a/CMake/cdat_modules_extra/cdat_python_step.cmake.in
+++ /dev/null
@@ -1,18 +0,0 @@
-
-include(@cdat_CMAKE_BINARY_DIR@/cdat_common_environment.cmake)
-
-if(NOT ${make})
-  set(make make)
-endif()
-
-execute_process(
-  COMMAND ${make} ${BUILD_ARGS}
-  WORKING_DIRECTORY "${WORKING_DIR}"
-  RESULT_VARIABLE res)
-
-if(NOT ${res} EQUAL 0)
-  message("Make Errors detected: \n${CDAT_OUT}\n${CDAT_ERR}")
-  message(FATAL_ERROR "Error in Make")
-endif()
-message("Make succeeded.")
-
diff --git a/CMake/cdat_modules_extra/cdatmpi_configure_step.cmake.in b/CMake/cdat_modules_extra/cdatmpi_configure_step.cmake.in
deleted file mode 100644
index b8dd0172d102a0a97629c2b337914785dff9abbe..0000000000000000000000000000000000000000
--- a/CMake/cdat_modules_extra/cdatmpi_configure_step.cmake.in
+++ /dev/null
@@ -1,22 +0,0 @@
-set(ENV{PKG_CONFIG_PATH} "@cdat_EXTERNALS@/lib/pkgconfig:/usr/lib/x86_64-linux-gnu/pkgconfig:/usr/lib64/pkgconfig:/usr/lib/pkgconfig:/usr/share/pkgconfig:$ENV{PKG_CONFIG_PATH}")
-set(ENV{PKG_CONFIG} "@cdat_PKG_CONFIG_EXECUTABLE@")
-
-include(@cdat_CMAKE_BINARY_DIR@/cdat_common_environment.cmake)
-
-set(ENV{CC} mpicc)
-set(ENV{CXX} mpicxx)
-
-message("CONFIGURE_ARGS IS ${CONFIGURE_ARGS}")
-message("LD_ARGS IS $ENV{@LIBRARY_PATH@}")
-message("CFLAGS : $ENV{CFLAGS}")
-
-execute_process(
-  COMMAND env CC=$ENV{CC} CFLAGS=$ENV{CFLAGS} LD_LIBRARY_PATH=$ENV{@LIBRARY_PATH@} DYLD_FALLBACK_LIBRARY_PATH=$ENV{@LIBRARY_PATH@} @LIBRARY_PATH@=$ENV{@LIBRARY_PATH@} PKG_CONFIG=$ENV{PKG_CONFIG} PKG_CONFIG_PATH=$ENV{PKG_CONFIG_PATH} sh configure --prefix=${INSTALL_DIR} ${CONFIGURE_ARGS}
-  WORKING_DIRECTORY "${WORKING_DIR}"
-  RESULT_VARIABLE res)
-
-if(NOT ${res} EQUAL 0)
-  message("Config Errors detected: \n${CDAT_OUT}\n${CDAT_ERR}")
-  message(FATAL_ERROR "Error in config")
-endif()
-message("Config succeeded.")
diff --git a/CMake/cdat_modules_extra/checked_get.sh.in b/CMake/cdat_modules_extra/checked_get.sh.in
deleted file mode 100755
index 7a38feab45f06d4b2aa3d38d29da08832bc07b48..0000000000000000000000000000000000000000
--- a/CMake/cdat_modules_extra/checked_get.sh.in
+++ /dev/null
@@ -1,76 +0,0 @@
-#!/usr/bin/env bash
-
-# Does an md5 check between the local and remote resource;
-# returns 0 (success) iff there is no match, thus indicating that
-# an update is available.
-# USAGE: check_for_update [file] http://www.foo.com/file
-#
-check_for_update() {
-    local local_file
-    local remote_file
-    if (( $# == 1 )); then
-        remote_file=${1}
-        local_file=$(readlink -f ${1##*/})
-    elif (( $# == 2 )); then
-        local_file="../sources/"${1}
-        remote_file=${2}
-    else
-        echo "function \"check_for_update\":  Called with incorrect number of args! (fatal)"
-        exit 1
-    fi                      
-    echo "Local file is: ${local_file}"
-    [ ! -e ${local_file} ] && echo " WARNING: Could not find local file ${local_file}" && return 0
-    diff <(@MD5PRG@ ${local_file} | tr -s " " | cut -d " " -f @MD5CNT@ ) <(more ../md5s.txt | grep ${1} | tr -s " " | cut -d " " -f 1) >& /dev/null
-    [ $? != 0 ] && echo " Update Available @ ${remote_file}" && return 0
-    echo " ==> ${local_file} is up to date"
-    return 1
-}
-
-# If an update is available then pull it down... then check the md5 sums again!
-#
-#  Yes, this results in 3 network calls to pull down a file, but it
-#  saves total bandwidth, and it also allows the update-from-network
-#  process to be crontab-able while staying parsimonious with
-#  resources.  It is also very good practice to make sure that code
-#  being executed is the RIGHT code!
-#
-# NOTE: Has multiple return values; test for (( $? > 1 )) when looking for errors.
-#       A return value of 1 only means that the file is up-to-date and there
-#       is no reason to fetch it.
-#
-# USAGE: checked_get [file] http://www.foo.com/file
-#
-checked_get() {
-    check_for_update $@
-    [ $? != 0 ] && return 1
-
-    local local_file
-    local remote_file
-    if (( $# == 1 )); then
-        remote_file=${1}
-        local_file=${1##*/}
-    elif (( $# == 2 )); then
-        local_file="../sources/"${1}
-        remote_file=${2}
-    else
-        echo "function \"checked_get\":  Called with incorrect number of args! (fatal)"
-        exit 1
-    fi                      
-    echo "Local file: ${local_file}"
-    if [ -e ${local_file} ]; then
-	cp -v ${local_file} ${local_file}.bak 
-	chmod 600 ${local_file}.bak
-#	return 0
-    fi
-    @HASWGET@ -O ${local_file} ${remote_file}
-    [ $? != 0 ] && echo " ERROR: Problem pulling down [${remote_file}]" && return 2
-    diff <(@MD5PRG@ ${local_file} | tr -s " " | cut -d " " -f @MD5CNT@ ) <(more ../md5s.txt | grep ${1} | tr -s " " | cut -d " " -f 1) >& /dev/null
-#    diff <(md5sum ${local_file} | tr -s " " | cut -d " " -f 1) <(curl ${remote_file}.md5 | tr -s " " | cut -d " " -f 1) >& /dev/null
-    [ $? != 0 ] && echo " WARNING: Could not verify this file!" && return 3
-    echo "[VERIFIED]"
-    return 0
-}
-
-checked_get $@
-echo "${1} is where I get the tarred stuff"
-tar xzf "../sources/"${1}
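The script above hand-rolls fetch-and-verify in shell. The same contract fits in one CMake call, which also skips the transfer when a previously fetched file already matches the hash; a sketch with illustrative names (remote_url, local_file and expected_md5 are placeholders, not values from the original):

    file(DOWNLOAD "${remote_url}" "${local_file}"
      EXPECTED_HASH MD5=${expected_md5}
      STATUS stat)
    list(GET stat 0 code)
    if(NOT code EQUAL 0)
      message(FATAL_ERROR "could not fetch or verify ${remote_url}: ${stat}")
    endif()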
diff --git a/CMake/cdat_modules_extra/clean_cdat_from_conda.bash.in b/CMake/cdat_modules_extra/clean_cdat_from_conda.bash.in
new file mode 100755
index 0000000000000000000000000000000000000000..dc57305463bfcbe27e35f687c842d7f79fad8406
--- /dev/null
+++ b/CMake/cdat_modules_extra/clean_cdat_from_conda.bash.in
@@ -0,0 +1,4 @@
+#!/usr/bin/env bash
+
+source activate root
+@CONDA@ remove --all -y -n @CONDA_ENVIRONMENT_NAME@
diff --git a/CMake/cdat_modules_extra/cleanenv_configure_step.cmake.in b/CMake/cdat_modules_extra/cleanenv_configure_step.cmake.in
deleted file mode 100644
index a3872f3733b1d5a8f0f59216fd445c2869a3abc2..0000000000000000000000000000000000000000
--- a/CMake/cdat_modules_extra/cleanenv_configure_step.cmake.in
+++ /dev/null
@@ -1,17 +0,0 @@
-#set(ENV{PKG_CONFIG_PATH} "@cdat_EXTERNALS@/lib/pkgconfig:/usr/lib64/pkgconfig:/usr/lib/pkgconfig:/usr/share/pkgconfig:$ENV{PKG_CONFIG_PATH}")
-#set(ENV{PKG_CONFIG} "@cdat_PKG_CONFIG_EXECUTABLE@")
-
-#include(@cdat_CMAKE_BINARY_DIR@/cdat_common_environment.cmake)
-
-#message(CONFIGURE_ARGS IS ${CONFIGURE_ARGS})
-
-execute_process(
-  COMMAND sh configure --prefix=${INSTALL_DIR} ${CONFIGURE_ARGS}
-  WORKING_DIRECTORY "${WORKING_DIR}"
-  RESULT_VARIABLE res)
-
-if(NOT ${res} EQUAL 0)
-  message("Config Errors detected: \n${CDAT_OUT}\n${CDAT_ERR}")
-  message(FATAL_ERROR "Error in config")
-endif()
-message("Config succeeded.")
diff --git a/CMake/cdat_modules_extra/configobj_build_step.cmake.in b/CMake/cdat_modules_extra/configobj_build_step.cmake.in
deleted file mode 100644
index 5edd0af433cd3c463b41e15f34784eda764ad27c..0000000000000000000000000000000000000000
--- a/CMake/cdat_modules_extra/configobj_build_step.cmake.in
+++ /dev/null
@@ -1,6 +0,0 @@
-include(@cdat_CMAKE_BINARY_DIR@/cdat_common_environment.cmake)
-
-execute_process(
-  COMMAND "@PYTHON_EXECUTABLE@" setup.py install @PYTHON_EXTRA_PREFIX@
-  WORKING_DIRECTORY "@configobj_source_dir@"
-)
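This three-line template recurs verbatim for gsw and h5py below and, unlike the make and configure steps, never checks the result, so a failed setup.py install went unnoticed. A sketch of the same step with the usual guard added, keeping the configured names:

    execute_process(
      COMMAND "@PYTHON_EXECUTABLE@" setup.py install @PYTHON_EXTRA_PREFIX@
      WORKING_DIRECTORY "@configobj_source_dir@"
      RESULT_VARIABLE res)
    if(NOT res EQUAL 0)
      message(FATAL_ERROR "configobj: setup.py install failed")
    endif()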
diff --git a/CMake/cdat_modules_extra/curses_patch_step.cmake.in b/CMake/cdat_modules_extra/curses_patch_step.cmake.in
deleted file mode 100644
index 04c28afdc3c25c94e5f701775e59e59d53797ad1..0000000000000000000000000000000000000000
--- a/CMake/cdat_modules_extra/curses_patch_step.cmake.in
+++ /dev/null
@@ -1,5 +0,0 @@
-execute_process(
-  WORKING_DIRECTORY @curses_source@
-  COMMAND patch -Np1
-  INPUT_FILE @cdat_CMAKE_SOURCE_DIR@/curses_gcc5.patch
-)
diff --git a/CMake/cdat_modules_extra/ezget_Makefile.gfortran.in b/CMake/cdat_modules_extra/ezget_Makefile.gfortran.in
deleted file mode 100644
index 7e0987908b4e3c1665c222ad399702e45e1e705f..0000000000000000000000000000000000000000
--- a/CMake/cdat_modules_extra/ezget_Makefile.gfortran.in
+++ /dev/null
@@ -1,78 +0,0 @@
-# EZGet Makefile
-#
-# Usage:
-#
-# Change DEBUG as needed.
-# Check the paths specified here for whether they are right for you.
-# Provide a path to fcddrs.h, or copy it (it's in cdat/libcdms/include)
-# make
-#
-#--------------------------------------------------------------------
-LIBNAME = ezget
-#jfp was CDMSLIB = /usr/local/lib
-#jfp was CDMSINC = /usr/local/include
-CDMSLIB = @cdat_EXTERNALS@/lib
-CDMSINC = @cdat_EXTERNALS@/include
-DEBUG = -O 
-# DEBUG = -g -save-temps -O0
-# Requires gfortran
-FC = gfortran
-CC = gcc
-#ARCHOPT = -arch x86_64
-#ARCHOPT = -arch i386
-ARCHOPT = -m64 -mtune=native
-# FOPTS = -fcray-pointer $(ARCHOPT) -W
-FOPTS = -fcray-pointer $(ARCHOPT) -W -Dgfortran -Dsun -D__linux -D__linux_gfortran -fpic -fPIC -I ../../libdrs/lib -I ../include -I @cdat_EXTERNALS@/include
-FFLAGS = $(DEBUG) $(FOPTS)
-INSTALL_LIB = @cdat_EXTERNALS@/lib
-INSTALL_INC = @cdat_EXTERNALS@/include
-CPPFLAGS = $(ARCHOPT)
-CPP = cpp
-
-FOBJECTS = Src/$(LIBNAME).o
-FINCLUDES = drsdef.h drscom.h cycle.h
-# FINCLUDES = 
-FSOURCES = $(FOBJECTS:.o=.F)
-
-COBJECTS = 
-CINCLUDES = drscdf.h
-CSOURCES = $(COBJECTS:.o=.c)
-
-OBJECTS = $(FOBJECTS) $(COBJECTS)
-SOURCES = $(FSOURCES) $(CSOURCES)
-INCLUDES = $(FINCLUDES) $(CINCLUDES)
-#--------------------------------------------------------------------
-
-all: lib$(LIBNAME).a 
-#lib$(LIBNAME).so
-
-shared: drsdef.h lib$(LIBNAME).so
-
-lib$(LIBNAME).a: $(OBJECTS)
-	ar rv lib$(LIBNAME).a $?
-
-lib$(LIBNAME).so: $(OBJECTS)
-	$(CC) $(ARCHOPT) -lgfortran -L@cdat_EXTERNALS@/lib -L$(CDMSLIB) -I$(CDMSINC) -lcdms  -shared -o lib$(LIBNAME).so $(OBJECTS)
-
-#--------------------------------------------------------------------
-
-install: lib$(LIBNAME).a
-	cp lib$(LIBNAME).a $(INSTALL_LIB); chmod 644 $(INSTALL_LIB)/lib$(LIBNAME).a
-#	cp drsdef.h $(INSTALL_INC); chmod 644 $(INSTALL_INC)/drsdef.h
-
-#--------------------------------------------------------------------------
-
-# Miscellaneous junk
-
-tags:
-	etags $(SOURCES) $(INCLUDES)
-
-clean:
-	-rm -f Src/*.o
-	-rm -f *~
-	-rm -f core
-	
-.SUFFIXES: .F .o
-
-.F.o:
-	cd Src ; $(FC) $(FFLAGS) -c ../$<
diff --git a/CMake/cdat_modules_extra/fetch_uvcmetrics_testdata.cmake b/CMake/cdat_modules_extra/fetch_uvcmetrics_testdata.cmake
deleted file mode 100644
index 996ae0a281b231332b6ebeb93d237e6202604528..0000000000000000000000000000000000000000
--- a/CMake/cdat_modules_extra/fetch_uvcmetrics_testdata.cmake
+++ /dev/null
@@ -1,25 +0,0 @@
-# used variables:
-#   FILE_URL    The url where the file is available
-#   FILE_PATH   The destination for the file
-#   FILE_MD5    The expected md5
-
-# check if the file already exists
-if(EXISTS "${FILE_PATH}")
-  # check md5sum
-  file(MD5 "${FILE_PATH}" output_md5)
-
-  if(${output_md5} STREQUAL ${FILE_MD5})
-    return() # nothing to do
-  endif()
-endif()
-
-# download the file
-file(DOWNLOAD "${FILE_URL}" "${FILE_PATH}" STATUS stat)
-list(GET stat 0 exit_code)
-list(GET stat 1 msg)
-
-# fail on error
-if(NOT exit_code EQUAL 0)
-  file(REMOVE "${FILE_PATH}")
-  message(FATAL_ERROR "Error downloading: ${msg}")
-endif()
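As its header comment says, the script expects FILE_URL, FILE_PATH and FILE_MD5 to be defined by the caller, i.e. it runs in CMake script mode. A hypothetical invocation (URL, path and checksum are placeholders, not values from the original):

    execute_process(
      COMMAND "${CMAKE_COMMAND}"
              -DFILE_URL=http://example.org/testdata/obs.nc
              -DFILE_PATH=${CMAKE_BINARY_DIR}/testdata/obs.nc
              -DFILE_MD5=0123456789abcdef0123456789abcdef
              -P fetch_uvcmetrics_testdata.cmake
      RESULT_VARIABLE res)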
diff --git a/CMake/cdat_modules_extra/ffmpeg_build_step.cmake.in b/CMake/cdat_modules_extra/ffmpeg_build_step.cmake.in
deleted file mode 100644
index d0ef31f298895d0d0faec07d948438d3e7740e2d..0000000000000000000000000000000000000000
--- a/CMake/cdat_modules_extra/ffmpeg_build_step.cmake.in
+++ /dev/null
@@ -1,14 +0,0 @@
-
-if(APPLE)
-  set(ENV{MACOSX_DEPLOYMENT_TARGET} "@CMAKE_OSX_DEPLOYMENT_TARGET@")
-  set(ENV{CFLAGS} "@cdat_osx_arch_flag@ @cdat_osx_version_flag@ @cdat_osx_sysroot@")
-endif()
-
-set(ENV{PATH} $ENV{PATH}:@cdat_EXTERNALS@/bin)
-
-EXECUTE_PROCESS(
-  #    COMMAND sh configure --disable-static --disable-network --disable-zlib --disable-ffserver --disable-ffplay --disable-decoders --enable-shared --enable-swscale --prefix=@ffmpeg_install@
-  COMMAND make
-  WORKING_DIRECTORY "@ffmpeg_source@"
-  RESULT_VARIABLE rv
-  )
diff --git a/CMake/cdat_modules_extra/git_clone.sh.in b/CMake/cdat_modules_extra/git_clone.sh.in
deleted file mode 100755
index 05bb4d3fdb432fbdeeede62b19ac48a8d8df5781..0000000000000000000000000000000000000000
--- a/CMake/cdat_modules_extra/git_clone.sh.in
+++ /dev/null
@@ -1,10 +0,0 @@
-#!/bin/sh
-
-cd "@CMAKE_INSTALL_PREFIX@"
-"@GIT_EXECUTABLE@" clone --no-checkout --depth 1 -b @BRANCH@ @GIT_URL@ "@GIT_TARGET@"
-cd "@GIT_TARGET@"
-if "@GIT_EXECUTABLE@" rev-parse --symbolic-full-name @BRANCH@ | grep -q '^refs/tags/'; then
-    "@GIT_EXECUTABLE@" checkout @BRANCH@
-else
-    "@GIT_EXECUTABLE@" checkout origin/@BRANCH@
-fi
diff --git a/CMake/cdat_modules_extra/git_update.sh.in b/CMake/cdat_modules_extra/git_update.sh.in
deleted file mode 100755
index a8b3b7954ac3e3abca00882fd62fdff6724c5055..0000000000000000000000000000000000000000
--- a/CMake/cdat_modules_extra/git_update.sh.in
+++ /dev/null
@@ -1,8 +0,0 @@
-#!/bin/sh
-cd "@SOURCE_DIR@"
-"@GIT_EXECUTABLE@" fetch origin --prune
-if "@GIT_EXECUTABLE@" rev-parse --symbolic-full-name @BRANCH@ | grep -q '^refs/tags/'; then
-    "@GIT_EXECUTABLE@" checkout -f @BRANCH@
-else
-    "@GIT_EXECUTABLE@" checkout -f origin/@BRANCH@
-fi
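Both git helpers hinge on the same dispatch: rev-parse --symbolic-full-name resolves to refs/tags/... for a tag, which is checked out directly, while anything else falls back to the remote-tracking branch. The same logic as a CMake sketch, with GIT_EXECUTABLE, BRANCH and SOURCE_DIR mirroring the configured names:

    execute_process(
      COMMAND "${GIT_EXECUTABLE}" rev-parse --symbolic-full-name ${BRANCH}
      WORKING_DIRECTORY "${SOURCE_DIR}"
      OUTPUT_VARIABLE ref
      OUTPUT_STRIP_TRAILING_WHITESPACE)
    if(ref MATCHES "^refs/tags/")
      set(target "${BRANCH}")          # a tag: check it out directly
    else()
      set(target "origin/${BRANCH}")   # a branch: follow the remote head
    endif()
    execute_process(
      COMMAND "${GIT_EXECUTABLE}" checkout -f ${target}
      WORKING_DIRECTORY "${SOURCE_DIR}")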
diff --git a/CMake/cdat_modules_extra/gsw_build_step.cmake.in b/CMake/cdat_modules_extra/gsw_build_step.cmake.in
deleted file mode 100644
index 1a344eb810896fdc7df75ffe145ec18cadb1351b..0000000000000000000000000000000000000000
--- a/CMake/cdat_modules_extra/gsw_build_step.cmake.in
+++ /dev/null
@@ -1,6 +0,0 @@
-include(@cdat_CMAKE_BINARY_DIR@/cdat_common_environment.cmake)
-
-execute_process(
-  COMMAND "@PYTHON_EXECUTABLE@" setup.py install @PYTHON_EXTRA_PREFIX@
-  WORKING_DIRECTORY "@gsw_source_dir@"
-)
diff --git a/CMake/cdat_modules_extra/h5py_build_step.cmake.in b/CMake/cdat_modules_extra/h5py_build_step.cmake.in
deleted file mode 100644
index 47e74002839b9d2521b25c8b7560817efa0cca16..0000000000000000000000000000000000000000
--- a/CMake/cdat_modules_extra/h5py_build_step.cmake.in
+++ /dev/null
@@ -1,6 +0,0 @@
-include(@cdat_CMAKE_BINARY_DIR@/cdat_common_environment.cmake)
-
-execute_process(
-  COMMAND "@PYTHON_EXECUTABLE@" setup.py install @PYTHON_EXTRA_PREFIX@
-  WORKING_DIRECTORY "@h5py_source_dir@"
-)
diff --git a/CMake/cdat_modules_extra/hdf5_patch_step.cmake.in b/CMake/cdat_modules_extra/hdf5_patch_step.cmake.in
deleted file mode 100644
index 588e26535f14a8ed134e9a595a0b364df3068236..0000000000000000000000000000000000000000
--- a/CMake/cdat_modules_extra/hdf5_patch_step.cmake.in
+++ /dev/null
@@ -1,10 +0,0 @@
-execute_process(
-  COMMAND
-  "@CMAKE_COMMAND@" -E copy_if_different @cdat_SOURCE_DIR@/exsrc/src/h5diff_correct_ansi.c ${WORKING_DIR}/tools/lib/h5diff.c
-  RESULT_VARIABLE errcode
-)
-if("${errcode}" STREQUAL "0")
-    message(STATUS "h5diff.c replaced")
-else()
-    message(FATAL_ERROR "Replacing h5diff.c failed: ${errcode}")
-endif()
diff --git a/CMake/cdat_modules_extra/install.py.in b/CMake/cdat_modules_extra/install.py.in
deleted file mode 100644
index 58398539d5281be5edbc1dbf1757fedb69f9a8d1..0000000000000000000000000000000000000000
--- a/CMake/cdat_modules_extra/install.py.in
+++ /dev/null
@@ -1,945 +0,0 @@
-import sys, getopt, os, shutil, string, glob, tempfile, hashlib
-from distutils.core import setup
-
-build_dir = os.getcwd()
-logdir = os.path.join(build_dir, 'logs').replace(" ","\ ")
-
-# Create logs directory if it does not exist
-if not os.path.exists(logdir):
-  os.makedirs(logdir)
-
-base_build_dir = os.path.join(build_dir, '..')
-os.environ['BUILD_DIR'] = build_dir
-
-current_dir = os.path.dirname(__file__)
-src_dir = os.path.join(current_dir, '..')
-installation_script_dir = os.path.join(src_dir, 'installation')
-here = installation_script_dir
-
-sys.path.append(src_dir)
-sys.path.append(build_dir)
-sys.path.append(installation_script_dir)
-
-control_script_path = os.path.join(installation_script_dir, 'control.py')
-execfile(control_script_path, globals(), globals())
-
-global target_prefix
-target_prefix = sys.prefix
-for i in range(len(sys.argv)):
-    a = sys.argv[i]
-    if a=='--prefix':
-        target_prefix=sys.argv[i+1]
-    sp = a.split("--prefix=")
-    if len(sp)==2:
-        target_prefix=sp[1]
-
-try:
-    os.makedirs(os.path.join(target_prefix,'bin'))
-except Exception,err:
-    pass
-try:
-    os.makedirs(os.path.join(target_prefix,'include'))
-except Exception,err:
-    pass
-try:
-    os.makedirs(os.path.join(target_prefix,'lib'))
-except Exception,err:
-    pass
-
-cdms_include_directory = os.path.join(target_prefix, 'include', 'cdms')
-cdms_library_directory = os.path.join(target_prefix, 'lib')
-
-version_file_path = os.path.join(base_build_dir, 'version')
-Version = open(version_file_path).read().strip()
-version = Version.split(".")
-for i in range(len(version)):
-    try:
-        version[i]=int(version[i])
-    except:
-        version[i]=version[i].strip()
-
-def norm(path):
-    "normalize a path"
-    return os.path.normpath(os.path.abspath(os.path.expanduser(path)))
-
-def testlib (dir, name):
-    "Test if there is a library in a certain directory with basic name."
-    if os.path.isfile(os.path.join(dir, 'lib' + name + '.a')):
-        return 1
-    if os.path.isfile(os.path.join(dir, 'lib' + name + '.so')):
-        return 1
-    if os.path.isfile(os.path.join(dir, 'lib' + name + '.sl')):
-        return 1
-    return 0
-
-def configure (configuration_files):
-    global action, target_prefix
-    options={}
-    execfile(os.path.join(installation_script_dir, 'standard.py'), globals(), options)
-    for file in configuration_files:
-        print >>sys.stderr, 'Reading configuration:', file
-        execfile(os.path.join(src_dir, file), globals(), options)
-
-    # Retrieve action
-    action = options['action']
-    # Establish libraries and directories for CDUNIF/CDMS
-    netcdf_directory = norm(options.get('netcdf_directory',os.environ['EXTERNALS']))
-    netcdf_include_directory = norm(options.get('netcdf_include_directory',
-                                           os.path.join(os.environ['EXTERNALS'],'include')))
-
-    #hdf5_library_directory = norm(os.path.join(os.environ.get('HDF5LOC',os.path.join(os.environ["EXTERNALS"])), 'lib'))
-    if (sys.platform in ['mac',]):
-        cdunif_library_directories = [cdms_library_directory,"/usr/X11R6/lib"]
-    else:
-        cdunif_library_directories = [cdms_library_directory]
-
-    options['CDMS_INCLUDE_DAP']="yes"
-##     if options.get('CDMS_INCLUDE_DAP','no')=='yes':
-##         netcdf_include_directory=norm(os.path.join(options['CDMS_DAP_DIR'],'include','libnc-dap'))
-##         netcdf_library_directory=norm(os.path.join(options['CDMS_DAP_DIR'],'lib'))
-##         dap_include=[norm(os.path.join(options['CDMS_DAP_DIR'],'include','libdap'))]
-##         dap_lib_dir=[norm(os.path.join(options['CDMS_DAP_DIR'],'lib'))]
-## ##         dap_lib=['dap','stdc++','nc-dap','dap','curl','z','ssl','crypto','dl','z','xml2','rx','z']
-## ##         if (sys.platform in ['linux2',]):
-## ##            dap_lib=['nc-dap','dap','stdc++','curl','z','ssl','xml2']
-## ##         elif (sys.platform in ['darwin',]):
-## ##            dap_lib=['nc-dap','dap','stdc++','curl','z','ssl','pthread','xml2','z']
-##         dap_lib=['nc-dap','dap','stdc++','curl','z','ssl','pthread','xml2']
-##         dap_lib = ['stdc++']
-##         dap_lib_dir=[]
-##         Libs=os.popen(norm(os.path.join(options['CDMS_DAP_DIR'],'bin','ncdap-config'))+' --libs').readlines()
-##         Libs+=os.popen(norm(os.path.join(options['CDMS_DAP_DIR'],'bin','dap-config'))+' --client-libs').readlines()
-##         for libs in Libs:
-##             libs=libs.split()
-##             for l in libs:
-##                 if l[:2]=='-l':
-##                     dap_lib.append(l[2:])
-##                 elif l[:2]=='-L'and l[2:] not in dap_lib_dir:
-##                     dap_lib_dir.append(l[2:])
-##         dap_lib.append("dap")
-##         dap_lib.append("xml2")
-##         netcdfname='nc-dap'
-## ##         print 'daplib:',dap_lib
-##     else:
-    if 1:
-        ## dap_include = [os.path.join(hdf5path,"include"),os.path.join(os.environ['EXTERNALS'],'include')]
-        dap_include = []
-        Dirs=os.popen('%s --cflags' % os.environ.get("LOCNCCONFIG","nc-config")).readlines()[0]
-        for d in Dirs.split():
-            if d[:2]=="-I":
-                dnm = d[2:]
-                if not dnm in dap_include:
-                    dap_include.append(dnm)
-        dap_lib = ['stdc++']
-        dap_lib = []
-        dap_lib_dir=[]
-        ## Libs=os.popen(norm(os.path.join(os.environ['EXTERNALS'],'bin','nc-config'))+' --libs').readlines()
-        Libs=os.popen('%s --libs' % os.environ.get("LOCNCCONFIG","nc-config")).readlines()
-        for libs in Libs:
-            libs=libs.split()
-            for l in libs:
-                if l[:2]=='-l':
-                    dap_lib.append(l[2:])
-                elif l[:2]=='-L'and l[2:] not in dap_lib_dir:
-                    if l[-3:]!='lib':
-                        l+='/lib'
-                    dap_lib_dir.append(l[2:])
-
-##         if enable_netcdf3==True:
-##             dap_include=[]
-##             dap_lib_dir=[]
-##         else:
-##             dap_include = [os.path.join(hdf5path,"include"),os.path.join(os.environ['EXTERNALS'],'include')]
-##             dap_lib_dir = [os.path.join(hdf5path,"lib"),os.path.join(os.environ['EXTERNALS'],'lib')]
-##         if enable_netcdf3 is True:
-##             daplib=[]
-##         else:
-##             dap_lib=['hdf5_hl','hdf5','m','z','dap','nc-dap','dapclient','curl','stdc++','xml2']
-##             # for now turn off the dap crap
-##             dap_lib=['hdf5_hl','hdf5','m','z']
-        netcdfname='netcdf'
-
-    if options.get('CDMS_INCLUDE_HDF','no')=='yes':
-        hdf_libraries = ['mfhdf','df','jpeg','z']
-        hdf_include=[norm(os.path.join(options['CDMS_HDF_DIR'],'include'))]
-        hdf_lib_dir=[norm(os.path.join(options['CDMS_HDF_DIR'],'lib'))]
-    else:
-        hdf_libraries = []
-        hdf_include=[]
-        hdf_lib_dir=[]
-
-    PNG_VERSION=""
-    if (sys.platform in ['darwin']):
-        PNG_VERSION="15"
-
-    grib2_libraries = ["grib2c","png"+PNG_VERSION,"jasper"]
-    ## if netcdf_library_directory not in cdunif_library_directories:
-    ##     cdunif_library_directories.append(netcdf_library_directory)
-    cdunif_include_directories = [cdms_include_directory]
-    ## if netcdf_include_directory not in cdunif_include_directories:
-    ##     cdunif_include_directories.append(netcdf_include_directory)
-
-
-    if sys.platform == "sunos5":
-        cdunif_include_directories.append('/usr/include')
-
-    drs_file = "@cdat_EXTERNALS@/lib/libdrs.a"
-
-    # Establish location of X11 include and library directories
-    if options['x11include'] or options['x11libdir']:
-        if options['x11include']:
-            options['x11include'] = norm(options['x11include'])
-        if options['x11libdir']:
-            options['x11libdir'] = norm(options['x11libdir'])
-    else:
-        for x in x11search:
-            if os.path.isdir(x):
-                if options['x11include']:
-                    options['x11include'].append(os.path.join(x, 'include'))
-                    options['x11libdir'].append(os.path.join(x, 'lib'))
-                else:
-                    options['x11include']=[norm(os.path.join(x, 'include'))]
-                    options['x11libdir']=[norm(os.path.join(x, 'lib'))]
-        else:
-            for w in x11OSF1lib:
-                if testlib(w, 'X11'):
-                    if not options['x11libdir']:
-                        options['x11libdir'] = [norm(w),]
-                    else:
-                        options['x11libdir'].append(norm(w))
-            for w in x11OSF1include:
-                if os.path.isdir(w):
-                    if not options['x11include']:
-                        options['x11include'] = [norm(w),]
-                    else:
-                        options['x11include'].append(norm(w))
-    # Check that we have both set correctly.
-    if not (options['x11include'] and \
-            options['x11libdir']
-            ):
-        print >>sys.stderr, """
-Failed to find X11 directories. Please see README.txt for instructions.
-"""
-        print options
-        raise SystemExit, 1
-
-    # Write cdat_info.py
-    os.chdir(installation_script_dir)
-    print 'Version is: ',Version
-    f = open(os.path.join(build_dir, 'cdat_info.py'), 'w')
-    sys.path.append(build_dir)
-    print >> f,"""
-Version = '%s'
-ping_checked = False
-check_in_progress = False
-def version():
-    return %s
-""" % (Version,str(version))
-    if options.get('CDMS_INCLUDE_DRS','no') == 'yes':
-        print >>f, """
-def get_drs_dirs ():
-    #import Pyfort, os
-    import os
-    #c = Pyfort.get_compiler('default')
-    drs_dir, junk = os.path.split(drs_file)
-    #return c.dirlist + [drs_dir]
-    return [drs_dir,"/usr/local/gfortran/lib","/usr/local/lib"]
-
-def get_drs_libs ():
-    #import Pyfort
-    #c = Pyfort.get_compiler('default')
-    return ['drs','gfortran'] + %s
-""" % repr(options.get("COMPILER_EXTRA_LIBS",[]))
-    else:
-        print >>f, """
-def get_drs_dirs ():
-    return []
-def get_drs_libs():
-    return []
-"""
-
-    print >>f, """\
-
-sleep=60 #minutes  (int required)
-
-actions_sent = {}
-
-SOURCE = 'CDAT'
-
-def get_version():
-  return Version
-
-def get_prefix():
-  import os,sys
-  try:
-    uv_setup_pth = os.environ["UVCDAT_SETUP_PATH"]
-    if os.uname()[0] == "Darwin":
-      uv_setup_pth = os.path.join(uv_setup_pth,
-          "Library","Frameworks","Python.framework","Versions",
-          "%%i.%%i" %% (sys.version_info.major,sys.version_info.minor)
-          )
-    return uv_setup_pth
-  except KeyError:
-    raise RuntimeError("UVCDAT environment not configured. Please source the setup_runtime script.")
-
-def get_sampledata_path():
-  import os
-  try:
-    return os.path.join(os.environ["UVCDAT_SETUP_PATH"],
-                        "share", "uvcdat", "sample_data")
-  except KeyError:
-    raise RuntimeError("UVCDAT environment not configured. Please source the setup_runtime script.")
-
-def runCheck():
-    import cdat_info,os
-    if cdat_info.ping_checked is False:
-        check_in_progress = True
-        val = None
-        envanom = os.environ.get("UVCDAT_ANONYMOUS_LOG",None)
-        if envanom is not None:
-            if envanom.lower() in ['true','yes','y','ok']:
-                val = True
-            elif envanom.lower() in ['false','no','n','not']:
-                val = False
-            else:
-                import warnings
-                warnings.warn("UVCDAT logging environment variable UVCDAT_ANONYMOUS_LOG should be set to 'True' or 'False'; you have it set to '%%s', so it will be ignored" %% envanom)
-        if val is None: # No env variable looking in .uvcdat
-            fanom = os.path.join(os.environ["HOME"],".uvcdat",".anonymouslog")
-            if os.path.exists(fanom):
-                f=open(fanom)
-                for l in f.readlines():
-                    sp = l.strip().split("UVCDAT_ANONYMOUS_LOG:")
-                    if len(sp)>1:
-                        try:
-                            val = eval(sp[1])
-                        except:
-                            pass
-                f.close()
-
-        reload(cdat_info)
-        return val
-
-def askAnonymous(val):
-        import cdat_info,os
-        while cdat_info.ping_checked is False and not val in [True, False]: # couldn't get a valid value from env or file
-            val2 = raw_input("Allow anonymous logging usage to help improve UV-CDAT? (you can also set the environment variable UVCDAT_ANONYMOUS_LOG to yes or no) [yes/no]")
-            if val2.lower() in ['y','yes','ok']:
-                val = True
-            elif val2.lower() in ['n','no','not']:
-                val = False
-            if val in [True,False]: # store result for next time
-                try:
-                    fanom = os.path.join(os.environ["HOME"],".uvcdat",".anonymouslog")
-                    if not os.path.exists(os.path.join(os.environ["HOME"],".uvcdat")):
-                        os.makedirs(os.path.join(os.environ["HOME"],".uvcdat"))
-                    f=open(fanom,"w")
-                    print >>f, "#Store information about allowing UVCDAT anonymous logging"
-                    print >>f, "# Needs to be True or False"
-                    print >>f, "UVCDAT_ANONYMOUS_LOG: %%s" %% val
-                    f.close()
-                except Exception,err:
-                    pass
-        else:
-            if cdat_info.ping_checked:
-                val = cdat_info.ping
-        cdat_info.ping = val
-        cdat_info.ping_checked = True
-        check_in_progress = False
-
-def pingPCMDIdb(*args,**kargs):
-    import cdat_info,os
-    while cdat_info.check_in_progress:
-       reload(cdat_info)
-    val = cdat_info.runCheck()
-    if val is False:
-      cdat_info.ping_checked = True
-      cdat_info.ping = False
-      return
-    try:
-      if not cdat_info.ping:
-        return
-    except:
-      pass
-    cdat_info.askAnonymous(val)
-    import threading
-    kargs['target']=pingPCMDIdbThread
-    kargs['args']=args
-    t = threading.Thread(**kargs)
-    t.start()
-
-def pingPCMDIdbThread(*args,**kargs):
-    import threading
-    kargs['target']=submitPing
-    kargs['args']=args
-    t = threading.Thread(**kargs)
-    t.start()
-    import time
-    time.sleep(5) # Let's wait 5 seconds tops for this ping to work
-    if t.isAlive():
-        try:
-            t._Thread__stop()
-        except:
-            pass
-def submitPing(source,action,source_version=None):
-  try:
-    import urllib2,sys,os,cdat_info,hashlib,urllib
-    if source in ['cdat','auto',None]:
-      source = cdat_info.SOURCE
-    if cdat_info.ping:
-      if not source in actions_sent.keys():
-        actions_sent[source]=[]
-      elif action in actions_sent[source]:
-        return
-      else:
-        actions_sent[source].append(action)
-      data={}
-      uname = os.uname()
-      data['platform']=uname[0]
-      data['platform_version']=uname[2]
-      data['hashed_hostname']=hashlib.sha1(uname[1]).hexdigest()
-      data['source']=source
-      if source_version is None:
-        data['source_version']=cdat_info.get_version()
-      else:
-        data['source_version']=source_version
-      data['action']=action
-      data['sleep']=cdat_info.sleep
-      data['hashed_username']=hashlib.sha1(os.getlogin()).hexdigest()
-      urllib2.urlopen('http://uv-cdat.llnl.gov/UVCDATUsage/log/add/',urllib.urlencode(data))
-  except Exception,err:
-    pass
-
-CDMS_INCLUDE_DAP = %s
-CDMS_DAP_DIR = %s
-CDMS_HDF_DIR = %s
-CDMS_GRIB2LIB_DIR = %s
-CDMS_INCLUDE_GRIB2LIB = %s
-CDMS_INCLUDE_DRS = %s
-CDMS_INCLUDE_HDF = %s
-CDMS_INCLUDE_PP = %s
-CDMS_INCLUDE_QL = %s
-drs_file = %s
-netcdf_directory = %s
-netcdf_include_directory = %s
-cdunif_include_directories = %s + %s + %s
-cdunif_library_directories = %s + %s + %s + get_drs_dirs()
-cdunif_libraries = %s + %s + get_drs_libs() + %s + %s
-x11include = %s
-x11libdir = %s
-mathlibs = %s
-action = %s
-externals = %s
-""" % (
-        repr(options.get('CDMS_INCLUDE_DAP','no')),
-        repr(options.get('CDMS_DAP_DIR','.')),
-        repr(options.get('CDMS_HDF_DIR','.')),
-        repr(options.get('CDMS_GRIB2LIB_DIR',os.environ['EXTERNALS'])),
-        repr(options.get('CDMS_INCLUDE_GRIB2LIB',"yes")),
-        repr(options['CDMS_INCLUDE_DRS']),
-        repr(options['CDMS_INCLUDE_HDF']),
-        repr(options['CDMS_INCLUDE_PP']),
-        repr(options['CDMS_INCLUDE_QL']),
-        repr(drs_file),
-        repr(netcdf_directory),
-        repr(netcdf_include_directory),
-        repr(cdunif_include_directories),repr(dap_include),repr(hdf_include),
-        repr(cdunif_library_directories),repr(dap_lib_dir),repr(hdf_lib_dir),
-        repr(['cdms', netcdfname]),repr(dap_lib),repr(hdf_libraries),repr(grib2_libraries),
-        repr(options['x11include']),
-        repr(options['x11libdir']),
-        repr(options['mathlibs']),
-        repr(options['action']),
-        repr(os.environ['EXTERNALS']),
-        )
-    if enable_aqua:
-        print >>f, 'enable_aqua = True'
-    else:
-        print >>f, 'enable_aqua = False'
-    f.close()
-    cdat_info_path = os.path.join(os.environ['BUILD_DIR'], 'cdat_info')
-    if not norun:
-      # Install the configuration
-      #would be best to add 'clean' but it gives stupid warning error
-      sys.argv[1:]=['-q', 'install', '--prefix=%s' % target_prefix]
-      setup (name="cdat_info",
-       version="0.0",
-       package_dir = { 'cdat_info' : os.path.dirname(cdat_info_path)},
-      )
-      os.system('/bin/rm -fr build')
-
-    py_prefix = os.path.join(target_prefix,'lib','python%i.%i' % sys.version_info[:2],'site-packages')
-    cdat_info_src_path = os.path.join(build_dir, 'cdat_info.py')
-    cdat_info_dst_path = os.path.join(py_prefix, 'cdat_info.py')
-    if os.path.isfile(cdat_info_src_path):
-        shutil.copyfile(cdat_info_src_path, cdat_info_dst_path)
-    else:
-       print>>sys.stderr, 'Failed to copy %s to %s' % (cdat_info_src_path, cdat_info_dst_path)
-
-    os.chdir(here)
-    print >>sys.stderr, 'Configuration installed.'
-
-def usage():
-    f = open('HELP.txt')
-    lines = f.readlines()
-    f.close()
-    for line in lines[10:-9]:
-        sys.stdout.write(line)
-    print '\tDefault Packages'
-    print '\t----------------'
-    packages.append('\n\tContributed Packages\n\t--------------------')
-    #execfile('installation/contrib.py',globals(),globals())
-    for p in packages:
-        print '\t\t',p
-
-def main(arglist):
-    global norun, echo, force, do_configure, silent, action, logdir, enable_aqua,target_prefix, enable_netcdf3, hdf5path,zpath
-    enable_aqua = False
-    enable_cdms1 = False
-    enable_netcdf3=False
-    optlist, control_names = getopt.getopt(arglist,
-                       "c:defhnPl",
-                       ["enable-cdms-only",
-                        "configuration=",
-                        "debug",
-                        "prefix=",
-                        "echo",
-                        "force",
-                        "help",
-                        "with-externals=",
-                        "norun",
-                        "PCMDI",
-                        "pcmdi",
-                        "psql","enable-psql",
-                        "enable-hdf4","enable-HDF4",
-                        "with-HDF4=","with-hdf4=",
-                        "disable-hdf4","disable-HDF4",
-                        "disable-contrib",
-                        "enable-pp",
-                        "enable-drs","enable-DRS",
-                        "disable-externals-build",
-                        "disable-pp",
-                        ## Below are the arguments that could be passed to exsrc; nothing is done with them
-                        "disable-R","disable-r",
-                        #"disable-VTK","disable-vtk",
-                        "disable-XGKS","disable-xgks",
-                        "disable-Pyfort","disable-pyfort",
-                        "disable-NetCDF","disable-netcdf","disable-NETCDF",
-                        "disable-Numeric","disable-numeric",
-                        "disable-gplot","disable-GPLOT","disable-Gplot",
-                        "disable-gifsicle","disable-GIFSICLE",
-                        "disable-gifmerge","disable-GIFMERGE",
-                        "disable-pbmplus","disable-PBMPLUS",
-                        "disable-netpbm","disable-NETPBM",
-                        "disable-Pmw","disable-pmw",
-                        "disable-ioapi",
-                        "disable-cairo",
-                        "disable-ffmpeg",
-                        "disable-freetype",
-                        "disable-sampledata",
-                        "enable-ioapi",
-                        "enable-R","enable-r",
-                        "enable-numpy","disable-numpy",
-                        "enable-scipy","disable-scipy",
-                        "enable-ipython","disable-ipython",
-                        #"enable-VTK","enable-vtk",
-                        "enable-XGKS","enable-xgks",
-                        "enable-Pyfort","enable-pyfort",
-                        "enable-NetCDF","enable-netcdf","enable-NETCDF","enable-netcdf-fortran","enable-NETCDF-Fortran",
-                        "enable-Numeric","enable-numeric",
-                        "enable-gplot","enable-GPlot","enable-GPLOT",
-                        "enable-gifsicle","enable-GIFSICLE",
-                        "enable-gifmerge","enable-GIFMERGE",
-                        "enable-pbmplus","enable-PBMPLUS",
-                        "enable-netpbm","enable-NETPBM",
-                        "enable-Pmw","enable-pmw",
-                        "enable-aqua","enable-Aqua","enable-AQUA",
-                        "enable-cairo",
-                        "enable-ffmpeg",
-                        "enable-freetype",
-                        "enable-cdms1",
-                        "enable-netcdf3",
-                        "enable-spanlib",
-                        "disable-spanlib",
-                        "disable-tkbuild",
-                        "enable-qt",
-                        "enable-vcs-legacy",
-                        "enable-qt-framework",
-                        "with-qt=",
-                        "with-qt-lib=",
-                        "with-qt-inc=",
-                        "with-qt-bin=",
-                        "qt-debug",
-                        "list",
-                       ]
-                    )
-    configuration_files = []
-    nodap=0
-    nopp=0
-    nohdf=0
-    selfhdf=0
-    selfdap=0
-    selfpp=0
-    showlist=0
-    qtfw=False
-    qtinc=None
-    qtlib=None
-    qtbin=None
-    qt=False
-    control_names = ['contrib']
-    sampleData = True
-##     prefix_target = sys.exec_prefix
-    externals = os.environ.get("EXTERNALS",os.path.join(sys.prefix,"Externals"))
-    hdf5path = None
-    zpath = None
-
-    for i in range(len(optlist)):
-        letter=optlist[i][0]
-        if letter == "--enable-vcs-legacy":
-            qt=True
-        if letter == "--enable-qt":
-            qt=True
-        if letter == "--enable-qt-framework":
-            qtfw=True
-        if letter == "--with-qt":
-            qtinc=os.path.join(optlist[i][1],"include")
-            qtlib=os.path.join(optlist[i][1],"lib")
-            qtbin=os.path.join(optlist[i][1],"bin")
-        if letter == "--with-qt-inc":
-            qtinc=optlist[i][1]
-        if letter == "--with-qt-bin":
-            qtbin=optlist[i][1]
-        if letter == "--with-qt-lib":
-            qtlib=optlist[i][1]
-        if letter == "--enable-cdms-only":
-            control_names = ['cdmsonly']+control_names
-            if 'contrib' in control_names:
-                control_names.pop(control_names.index('contrib'))
-        elif letter == "--with-externals":
-            externals = optlist[i][1]
-        elif letter in ["-c",  "--configuration"]:
-            m = False
-            n = optlist[i][1]
-            if os.path.isfile(n):
-                m = n
-            elif os.path.isfile(n + '.py'):
-                m = n + '.py'
-            elif os.path.isfile(os.path.join('installation', n)):
-                m = os.path.join('installation', n)
-            elif os.path.isfile(os.path.join('installation', n + '.py')):
-                m = os.path.join('installation', n + '.py')
-            if m:
-                configuration_files.append(m)
-            else:
-                print >>sys.stderr, "Cannot find configuration file", optlist[i][1]
-            force = 1
-            do_configure = 1
-        elif letter in ["-d", "--debug"]:
-            debug_file = os.path.join('installation','debug.py')
-            configuration_files.append(debug_file)
-            force = 1
-            do_configure = 1
-        elif letter in ["-e", "--echo"]:
-            echo = 1
-        elif letter in ["--enable-cdms1"]:
-            enable_cdms1 = True
-        elif letter in ["--enable-netcdf3"]:
-            enable_netcdf3 = True
-        elif letter in ["--enable-aqua","--enable-Aqua","--enable-AQUA"]:
-            enable_aqua = True
-        elif letter in ["-f", "--force"]:
-            force = 1
-            do_configure = 1
-        elif letter in ["-h", "--help"]:
-            usage()
-            raise SystemExit, 1
-        elif letter in ["-P", "--PCMDI", "--pcmdi"]:
-            configuration_files.append(os.path.join('installation', 'pcmdi.py'))
-            force=1
-            do_configure=1  # Need libcdms built a certain way too.
-        elif letter in ["--psql", "--enable-psql"]:
-            configuration_files.append(os.path.join('installation', 'psql.py'))
-            do_configure=1  # Need libcdms built a certain way too.
-##         elif letter in ["--with-OpenDAP", "--with-opendap", "--with-OPENDAP","--enable-opendap","--enable-OpenDAP","--enable-OPENDAP"]:
-##             configuration_files.append(os.path.join('installation', 'DAP.py'))
-##             do_configure=1  # Need libcdms built a certain way too.
-##             selfdap=1
-##         elif letter in ["--with-HDF4", "--with-hdf4",'--enable-hdf4','--enable-HDF4']:
-##             configuration_files.append(os.path.join('installation', 'HDF.py'))
-##             do_configure=1  # Need libcdms built a certain way too.
-##             selfhdf=1
-        elif letter in ["--with-hdf5",]:
-            hdf5path = optlist[i][1]
-        elif letter in ["--with-z",]:
-            zpath = optlist[i][1]
-        elif letter in ["--prefix"]:
-            target_prefix = optlist[i][1]
-        elif letter in ['--enable-drs','--enable-DRS']:
-            configuration_files.append(os.path.join('installation', 'pcmdi.py'))
-            do_configure=1  # Need libcdms built a certain way too.
-        elif letter in ['--enable-pp','--enable-PP']:
-            configuration_files.append(os.path.join('installation', 'pp.py'))
-            do_configure=1  # Need libcdms built a certain way too.
-            selfpp=1
-##         elif letter in ["--enable-NetCDF","--enable-NETCDF","--enable-netcdf",
-##                         "--enable-netcdf-fortran",
-##                         "--disable-opendap","--disable-OpenDAP","--disable-OPENDAP"]:
-##             nodap=1
-##         elif letter in ["--disable-hdf4","--disable-HDF4"]:
-##             nohdf=1
-        elif letter in ["--disable-pp","--disable-PP"]:
-            nohdf=1
-        elif letter in ["--disable-sampledata",]:
-            sampleData = False
-        elif letter in ["-n", "--norun"]:
-            norun = 1
-        elif letter in ['--list','-l']:
-            showlist=1
-        elif letter in ['--disable-contrib']:
-            # drop every 'contrib' entry from the control list
-            while 'contrib' in control_names:
-                control_names.remove('contrib')
-
-    CDMS_INCLUDE_DAP='yes'
-    if nopp==1 and selfpp==1:
-        raise RuntimeError("Error: you chose to both enable and disable PP support!")
-    if nohdf==1 and selfhdf==1:
-        raise RuntimeError("Error: you chose to both enable and disable HDF!")
-##     if (nodap==0 and selfdap==0) and (sys.platform in ['linux2','darwin']):
-##         configuration_files.append(os.path.join('installation', 'DAP.py'))
-##         do_configure=1  # Need libcdms built a certain way too.
-##     if (nohdf==0 and selfhdf==0) and (sys.platform in ['linux2','darwin']):
-##         configuration_files.append(os.path.join('installation', 'HDF.py'))
-##         do_configure=1  # Need libcdms built a certain way too.
-    if (nopp==0 and selfpp==0) and (sys.platform in ['linux2','darwin']):
-        configuration_files.append(os.path.join('installation', 'pp.py'))
-        do_configure=1  # Need libcdms built a certain way too.
-
-    if hdf5path is None: hdf5path= os.path.join(externals)
-    if zpath is None: zpath= externals
-    os.environ['EXTERNALS']=externals
-
-    control_files = []
-    for n in control_names:
-        m = ''
-        if os.path.isfile(n):
-            m = n
-        elif os.path.isfile(n + '.py'):
-            m = n + '.py'
-        elif os.path.isfile(os.path.join('installation', n)):
-            m = os.path.join('installation', n)
-        elif os.path.isfile(os.path.join('installation', n + '.py')):
-            m = os.path.join('installation', n + '.py')
-        elif os.path.isfile(os.path.join(src_dir, 'installation', n + '.py')):
-            m = os.path.join(src_dir, 'installation', n + '.py')
-
-        if m:
-            control_files.append(m)
-        else:
-            print >>sys.stderr, 'Cannot find control file', n
-            raise SystemExit, 1
-
-    for control_file in control_files:
-        print 'Running:',control_file
-        execfile(control_file, globals(), globals())
-
-    if showlist:
-        print 'List of Packages that would be installed:'
-        for p in packages:
-            print p
-        sys.exit()
-    if force:
-        os.system('./scripts/clean_script')
-
-    sys.path.insert(0,os.path.join(target_prefix,'lib','python%i.%i' % sys.version_info[:2],'site-packages'))
-    if do_configure:
-        force = 1
-        if os.path.isfile(os.path.join(build_dir, 'cdat_info.py')):
-            os.unlink(os.path.join(build_dir, 'cdat_info.py'))
-        print >>sys.stderr, 'Configuring & installing scripts.'
-        configure(configuration_files)
-        images_path = os.path.join(src_dir, 'images')
-        os.chdir(images_path)
-        scripts = glob.glob('*')
-        for script in scripts:
-            if script[-1] == '~': continue
-            if script == "README.txt": continue
-            target = os.path.join(target_prefix, 'bin', script)
-            if os.path.isfile(target): os.unlink(target)
-            shutil.copy(script, target)
-        os.chdir(here)
-    else:
-        import cdat_info
-        action = cdat_info.action
-
-    # Install CDMS
-    cdms_library_file = os.path.join(cdms_library_directory, 'libcdms.a')
-    #if force or not os.path.isfile(cdms_library_file):
-    #    install('libcdms', action)
-    #    if (sys.platform in ['darwin',]):
-    #       os.system('ranlib '+os.path.join(target_prefix,'lib','libcdms.a'))
-
-    # Install Packages
-    package_errors=0
-    package_failed=[]
-    if enable_cdms1:
-        packages.append("Packages/regrid")
-        packages.append("Packages/cdms")
-    for p in packages:
-        h = os.getcwd()
-        oldcmd=action["setup.py"]+""
-        action['setup.py'] = action['setup.py'].strip()[:-1]+" build -b "+ os.environ['BUILD_DIR']+"/"+p
-        try:
-            if p == "Packages/vcs":
-                action["setup.py"]=oldcmd.strip()[:-1]+" --old-and-unmanageable; "
-                if qtfw:
-                    action["setup.py"]=oldcmd.strip()[:-1]+" --enable-qt-framework ; "
-                if qt:
-                    action["setup.py"]=oldcmd.strip()[:-1]+" --enable-qt ; "
-                if qtinc is not None:
-                    action["setup.py"]=action["setup.py"].strip()[:-1]+" --with-qt-inc=%s ; "%qtinc
-                if qtlib is not None:
-                    action["setup.py"]=action["setup.py"].strip()[:-1]+" --with-qt-lib=%s ; "%qtlib
-                if qtbin is not None:
-                    action["setup.py"]=action["setup.py"].strip()[:-1]+" --with-qt-bin=%s ; "%qtbin
-            install(p, action)
-        except:
-            package_errors+=1
-            package_failed.append(p)
-            os.chdir(h)
-            print >>sys.stderr, 'Error: Installation of Package:',p,'FAILED'
-        action["setup.py"]=oldcmd
-
-    # Celebrate
-    if echo:
-        print "Simulated build complete."
-    elif not silent:
-        print >>sys.stderr, finish
-        if package_errors!=0:
-            print >>sys.stderr, '\n              --- WARNING ---\n'
-            print >>sys.stderr,package_errors,'Packages reported as FAILED, see logs\n'
-            for p in package_failed:
-                print >>sys.stderr,'\t\t',p
-            print >>sys.stderr
-        print >>sys.stderr, '******************************************************\n'
-        print >>sys.stderr, """
-        ******************************************************
-        CDAT has been installed in %s .
-        Please make sure all modules built successfully
-        (see above build messages)
-        ******************************************************
-        """ % (target_prefix,)
-
-def _install(file, action):
-    h = os.getcwd()
-    absfile = os.path.abspath(file)
-    print 'absfile ', absfile
-    dirname, basename = os.path.split(absfile)
-    dirfinal = os.path.split(dirname)[-1]
-    os.chdir(dirname)
-    name, ext = os.path.splitext(basename)
-    if ext.lower() == ".pfp":
-        p1 = action['*.pfp']
-    elif action.has_key(absfile):
-        p1 = action[absfile]
-    elif action.has_key(file):
-        p1 = action[file]
-    elif action.has_key(basename):
-        p1 = action[basename]
-    else:
-        print "Do not know what to do with", file, "in", dirname
-        print >>sys.stderr, "Do not know what to do with", file, "in", dirname
-        raise SystemExit, 1
-
-    if log:
-        logfile = os.path.join(logdir, dirfinal+".LOG")
-        if not silent:
-            print >>sys.stderr, "Processing", dirfinal + ', log =', logfile
-    else:
-        logfile = tempfile.mktemp()
-        if not silent:
-            print >>sys.stderr, "Processing", dirfinal
-    p1 = p1 % { 'filename': file }
-    sep = " > %s 2>&1 ; " % logfile
-    p = sep.join(p1.split(";"))
-##     os.environ["CFLAGS"]="%s -L%s/lib" % (os.environ.get("CFLAGS",""), os.environ["EXTERNALS"])
-    add_lib = "-L%s/lib" % (os.environ["EXTERNALS"],)
-    cflags_current = os.environ.get("CFLAGS","")
-    if cflags_current.find(add_lib) == -1:
-        os.environ["CFLAGS"]="%s %s" % (cflags_current, add_lib)
-    p = 'env CFLAGS="%s" %s' % (os.environ["CFLAGS"],p)
-    if echo:
-        print >> sys.stderr, p
-    print norun
-    if norun:
-        r = 0
-    else:
-        #print '====>executing: ', p
-        r = os.system(p)
-    if r:
-        print >>sys.stderr, "Install failed in directory", dirname
-        print >>sys.stderr, "Log=", logfile
-        raise SystemExit, 1
-    elif not log and not norun:
-        os.unlink(logfile)
-
-    f = open(os.path.join(build_dir, 'rebuild.py'), 'w')
-    print >>f, """
-import os
-j = os.system(%s)
-if j:
-    print 'Compilation failed'
-    raise SystemExit, 1
-""" % (repr(p1+ " 1>LOG.rebuild"),)
-    f.close()
-    os.chdir(h)
-
-def install (arg, action):
-    arg = os.path.normpath(arg)
-    installer = ''
-    arg = os.path.join(src_dir, arg)
-    if os.path.isdir(arg):
-        for x in (glob.glob(os.path.join(arg, '*.pfp')) + \
-                 ['autogen.sh',
-                  'install.py',
-                  'setup.py',
-                  'install_script',
-                  'Makefile',
-                  'makefile'] ):
-            name = os.path.join(arg,x)
-            if os.path.isfile(name):
-                installer = name
-                break
-        else:
-            print >>sys.stderr, "Cannot find installation instructions in", arg
-            raise SystemExit, 1
-    elif os.path.isfile(arg):
-        installer = arg
-        designator, junk = os.path.split(arg)
-    else:
-        print >>sys.stderr, "Cannot find", arg
-        raise SystemExit, 1
-
-    _install(installer, action)
-
-
-if __name__ == "__main__":
-    arglist = sys.argv[1:]
-    main(arglist)
-    ## This parts creates links from Externals...
-    try:
-        import cdat_info
-        externals = cdat_info.externals
-    except:
-        externals = os.path.join(sys.prefix,"Externals")
-    externals = os.environ.get("EXTERNALS",externals)
-    externals_path = os.path.join(externals,'bin')
-    files = os.listdir(externals_path)
-    for file in files:
-        fnm = os.path.join(sys.prefix,'bin',file)
-        if not os.path.exists(fnm) and not os.path.islink(fnm):
-            try:
-                os.symlink(os.path.join(externals_path,file),fnm)
-            except:
-                pass
-
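The deleted installer above drives every package build through one shell string: _install() splits the action on ';', rejoins it with ' > logfile 2>&1 ; ' so each sub-command appends to the per-package log, prepends the CFLAGS environment, writes a rebuild.py helper next to the build, and aborts on the first nonzero exit status. A minimal sketch of that logging idiom, using a hypothetical run_logged() helper and made-up commands rather than the script's own code:

    import os
    import sys

    def run_logged(cmd, logfile):
        # Re-join ';'-separated sub-commands so each one redirects into
        # the same log; as in _install() above, a trailing command is
        # only redirected if the string ends with ';'.
        sep = " > %s 2>&1 ; " % logfile
        status = os.system(sep.join(cmd.split(";")))
        if status:
            print >>sys.stderr, "Install failed, log =", logfile
            raise SystemExit, 1

    run_logged("./configure; make; make install;", "/tmp/pkg.LOG")
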
diff --git a/CMake/cdat_modules_extra/install_cdat_from_conda.bash.in b/CMake/cdat_modules_extra/install_cdat_from_conda.bash.in
new file mode 100755
index 0000000000000000000000000000000000000000..eec135fcb1aff12d3fa376f782ffda0f5e549bcb
--- /dev/null
+++ b/CMake/cdat_modules_extra/install_cdat_from_conda.bash.in
@@ -0,0 +1,15 @@
+#!/usr/bin/env bash
+
+@CONDA@ create -y -n @CONDA_ENVIRONMENT_NAME@ -c @CONDA_CHANNEL_UVCDAT@  hdf5 libnetcdf lapack clapack ossuuid libcf esmf jasper g2clib yasm x264 ffmpeg cmor "proj4>=4.9.2" vtk libcdms cdat_info flake8 requests numpy==1.9.2 matplotlib --show-channel-urls --copy
+
+source @ACTIVATE@ @CONDA_ENVIRONMENT_NAME@
+for pkg in testing cdtime regrid2 cdms2 esg DV3D vcs vcsaddons cdutil unidata xmgrace genutil Thermo WK distarray; do
+    cd @cdat_SOURCE_DIR@/Packages/${pkg}
+    rm -rf build
+    if [ ${pkg} == "vcs" ]; then
+        python setup.py install --old-and-unmanageable
+    else
+        python setup.py install
+    fi
+done
+
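The replacement flow is much smaller: create a named conda environment from the UV-CDAT channel, activate it, and install each Packages/ subdirectory with setup.py after clearing any stale build/ tree; vcs alone gets --old-and-unmanageable, which forces an old-style (non-egg) distutils install. A sketch of the same loop in Python, assuming it runs with the environment already activated:

    import os
    import subprocess

    PACKAGES = ["testing", "cdtime", "regrid2", "cdms2", "esg", "DV3D",
                "vcs", "vcsaddons", "cdutil", "unidata", "xmgrace",
                "genutil", "Thermo", "WK", "distarray"]

    def install_packages(source_dir):
        # Mirror the bash loop: remove stale build output, then run
        # setup.py install (vcs needs --old-and-unmanageable).
        for pkg in PACKAGES:
            pkg_dir = os.path.join(source_dir, "Packages", pkg)
            subprocess.check_call(["rm", "-rf", "build"], cwd=pkg_dir)
            args = ["python", "setup.py", "install"]
            if pkg == "vcs":
                args.append("--old-and-unmanageable")
            subprocess.check_call(args, cwd=pkg_dir)
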
diff --git a/CMake/cdat_modules_extra/jasper_configure_step.cmake.in b/CMake/cdat_modules_extra/jasper_configure_step.cmake.in
deleted file mode 100644
index ff0cccad7967e05cf78fe6ce2df7251dab2364f7..0000000000000000000000000000000000000000
--- a/CMake/cdat_modules_extra/jasper_configure_step.cmake.in
+++ /dev/null
@@ -1,11 +0,0 @@
-# On linux 64, gdal picks the static jasper library, make sure only shared libraries
-# are built (Alex Pletzer)
-
-# Make sure to pick up image and other libraries built by the superbuild
-include(@cdat_CMAKE_BINARY_DIR@/cdat_common_environment.cmake)
-
-EXECUTE_PROCESS(
-  COMMAND sh configure --enable-shared --disable-static --prefix=@jasper_install@
-  WORKING_DIRECTORY "@jasper_source@"
-  RESULT_VARIABLE rv
-  )
diff --git a/CMake/cdat_modules_extra/jpeg_install_step.cmake.in b/CMake/cdat_modules_extra/jpeg_install_step.cmake.in
deleted file mode 100644
index ab724cc8ae1069dbf02f24e28d6be1b20ccea997..0000000000000000000000000000000000000000
--- a/CMake/cdat_modules_extra/jpeg_install_step.cmake.in
+++ /dev/null
@@ -1,28 +0,0 @@
-
-execute_process(
-  COMMAND make install ${INSTALL_ARGS}
-  WORKING_DIRECTORY "${WORKING_DIR}"
-  RESULT_VARIABLE res)
-
-#cp build/jpeg*/lib* /Users/partyd/Kitware/uv-cdat/make-file-install/Externals/lib
-#cp build/jpeg*/*.h /Users/partyd/Kitware/uv-cdat/make-file-install/Externals/include
-
-file(GLOB jpeglibs "@jpeg_source@/lib*")
-file(GLOB jpegheaders "@jpeg_source@/*.h")
-
-
-foreach(lib ${jpeglibs})
-  execute_process(
-    COMMAND "@CMAKE_COMMAND@" -E copy_if_different ${lib} @jpeg_install@/lib
-    RESULT_VARIABLE res
-    OUTPUT_VARIABLE CDAT_OUT
-    ERROR_VARIABLE CDAT_ERR)
-endforeach()
-
-foreach(header ${jpegheaders})
-    execute_process(
-      COMMAND "@CMAKE_COMMAND@" -E copy_if_different ${header} @jpeg_install@/include
-      RESULT_VARIABLE res
-      OUTPUT_VARIABLE CDAT_OUT
-      ERROR_VARIABLE CDAT_ERR)
-endforeach()
\ No newline at end of file
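
This install step copies whatever lib*/ and *.h artifacts the jpeg build left in its source tree into the install prefix, using cmake -E copy_if_different so unchanged files are not rewritten. The same copy-if-changed idea expressed in Python, as an illustration only (paths are made up):

    import filecmp
    import glob
    import os
    import shutil

    def copy_if_different(pattern, dest):
        # Skip files whose stat signature (type/size/mtime, a shallow
        # compare) already matches the copy in dest; copy2 preserves
        # timestamps on the files that do get copied.
        for src in glob.glob(pattern):
            dst = os.path.join(dest, os.path.basename(src))
            if os.path.exists(dst) and filecmp.cmp(src, dst):
                continue
            shutil.copy2(src, dst)

    copy_if_different("/tmp/jpeg-src/lib*", "/usr/local/lib")
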
diff --git a/CMake/cdat_modules_extra/lats_Makefile.gfortran.in b/CMake/cdat_modules_extra/lats_Makefile.gfortran.in
deleted file mode 100644
index 566a6b5bc51f1eb4050c5e23eae71f832505ac69..0000000000000000000000000000000000000000
--- a/CMake/cdat_modules_extra/lats_Makefile.gfortran.in
+++ /dev/null
@@ -1,208 +0,0 @@
-# Generated automatically from Makefile.in by configure.
-# -*-Mode: indented-text;-*-
-# Makefile:    LATS makefile
-#
-# Copyright:   1996, Regents of the University of California
-#	       This software may not be distributed to others without
-#	       permission of the author.
-#
-# Author:      Bob Drach, Lawrence Livermore National Laboratory
-#              drach@llnl.gov
-#
-# Version:     $Id: Makefile.in,v 1.12 1996/10/29 00:20:44 drach Exp $
-#
-# Revision History:
-#
-# $Log: Makefile.in,v $
-# Revision 1.12  1996/10/29  00:20:44  drach
-# - Removed name conflicts with CDMS
-#
-# Revision 1.11  1996/10/22 19:04:57  fiorino
-# latsgrib bug in .ctl creator
-#
-# Revision 1.10  1996/10/16 22:09:51  drach
-# - Added automatic gribmap generation
-# - Restricted LATS_GRADS_GRIB convention to one grid per file
-#
-# Revision 1.9  1996/09/30 18:54:46  drach
-# - permit installation without the sources being present
-# - separate FORTRAN debug flag, since -O doesn't work on the Cray
-#
-# Revision 1.8  1996/09/17 16:52:31  drach
-# - Misc. cleanup
-#
-# Revision 1.7  1996/08/29 19:27:17  drach
-# - Cleaned up configuration macros, Makefile.in for portability
-#
-# Revision 1.6  1996/08/27 19:39:03  drach
-# - Added FORTRAN test
-# - Ported to other UNIX platforms
-#
-# Revision 1.5  1996/07/12 00:36:21  drach
-# - (GRIB) use undefined flag only when set via lats_miss_XX
-# - (GRIB) use delta when checking for missing data
-# - (GRIB) define maximum and default precision
-# - fixed lats_vartab to work correctly.
-# - Added report of routine names, vertical dimension types
-#
-# Revision 1.4  1996/06/27 19:19:34  drach
-# - Misc. cleanup
-#
-# Revision 1.3  1996/06/27 01:32:49  drach
-# - Fixed up file permissions on install
-#
-# Revision 1.2  1996/06/27 01:02:38  drach
-# - Added installation directives
-#
-# Revision 1.1  1996/06/12 18:09:23  drach
-# - Initial versions
-#
-#
-#
-# Note: to generate Makefile from Makefile.in:
-#    ./configure --cache-file=/dev/null \
-#        [--with-ncinc=<netCDF include directory>] \
-#        [--with-nclib=<netCDF library directory>] \
-#	 [--prefix=<installation directory]
-
-# Install prefix for architecture-independent files
-prefix=@cdat_EXTERNALS@
-
-# Install prefix for architecture-dependent files
-exec_prefix=    $(prefix)
-
-# Expanded directories
-BINDIR=$(exec_prefix)/bin
-INCLUDEDIR=$(prefix)/include
-LIBDIR=$(exec_prefix)/lib
-MANDIR=$(prefix)/man
-PARMDIR=$(prefix)/lib/lats
-
-CC = cc
-CFLAGS = -I.  -I@cdat_EXTERNALS@/include $(DEBUG) -I/usr/include/malloc
-CPPFLAGS =  -DHAVE_NETCDF -DLATS_CHANGE_GRADS_NAMES -DSTNDALN=1 -DBYTEORDER=1 -DGRADS_CRAY=0
-DEBUG = -O
-DEFS =  -DOS_NAME=Linux -DOS_MAJOR=2 
-FC = gfortran
-FDEBUG =
-FFLAGS = -I.  $(FDEBUG) -Wno-all
-INSTALL = /usr/bin/install -c
-INSTALL_PROGRAM = /usr/bin/install -c
-LDFLAGS = -L. -llats -L@cdat_EXTERNALS@/lib -lnetcdf  -lm
-OS = linux
-RANLIB = :
-
-# Shell to run make subcommands
-SHELL = /bin/sh
-
-#
-#	mf configuration
-#
-LATSLSVER="1.0"
-
-# Do not change values below this line
-#
-LIBRARY		= liblats.a 
-
-OBJS		= lats.o latsint.o latsnc.o latsgrib.o latstime.o latsstat.o latsfort.o fgbds.o \
-		  fgutil.o latsgribmap.o gaddes.o gagmap.o gamach.o gautil.o
-
-all:		$(LIBRARY) latsls
-
-latsls:	        latsls.o latsint.o
-		$(CC) latsls.o latsint.o $(CPPFLAGS) $(CFLAGS)  -o latsls
-
-$(LIBRARY):	$(OBJS)
-		ar rv $(LIBRARY) $?
-		$(RANLIB) $(LIBRARY)
-
-install:	libinstall
-
-libinstall:
-		-if test ! -d $(PARMDIR); \
-		then mkdir $(PARMDIR); \
-		fi
-		echo "LIBDIR"$(LIBDIR)
-		$(INSTALL_PROGRAM) -m 644 $(LIBRARY) $(LIBDIR)/$(LIBRARY)
-		(cd $(LIBDIR); $(RANLIB) $(LIBRARY))
-		$(INSTALL_PROGRAM) -m 644 lats.h $(INCLUDEDIR)/lats.h
-		$(INSTALL_PROGRAM) -m 644 lats.inc $(INCLUDEDIR)/lats.inc
-		$(INSTALL_PROGRAM) -m 644 amip2.parms $(PARMDIR)/amip2.parms
-
-test:		$(LIBRARY) testnc wgrib testgrib testf
-		@echo "test netCDF"
-		@if test -f testnc.nc; \
-		then rm -f testnc.nc; \
-		fi
-		@testnc
-		@rm -f testnc.nc
-		@echo "test GRIB"
-		@if test -f testgrib.grb; \
-		then rm -f testgrib.grb; \
-		fi
-		@if test -f testgrib.ctl; \
-		then rm -f testgrib.ctl; \
-		fi
-		@if test -f testgrib.bin; \
-		then rm -f testgrib.bin; \
-		fi
-		@testgrib
-		@rm -f testgrib.grb
-		@rm -f testgrib.ctl
-		@rm -f testgrib.bin
-		@echo "test FORTRAN"
-		@if test -f testf.nc; \
-		then rm -f testf.nc; \
-		fi
-		@testf
-		@rm -f testf.nc
-
-lats.o:		lats.c latsint.h lats.h
-latsint.o:	latsint.c latsint.h lats.h latsparm.h
-latsnc.o:	latsnc.c latsint.h lats.h
-latsgrib.o:	latsgrib.c fgrib.h fgrib_init.h latsint.h lats.h
-latstime.o:	latstime.c latsint.h lats.h latstime.h
-latsstat.o:	latsstat.c latsint.h lats.h
-latsfort.o:	latsfort.c lats.h cfortran.h
-fgbds.o:	grads.h fgbds.c fgrib.h
-fgutil.o:	fgutil.c fgrib.h
-latsgribmap.o:	grads.h gagmap.h
-gaddes.o:	grads.h
-gagmap.o:	grads.h gagmap.h
-gamach.o:	grads.h
-gautil.o:	grads.h
-latsls.o:	latsint.h latsls.c latsint.o
-		$(CC) -c latsls.c $(CPPFLAGS) $(DEFS) $(CFLAGS) -DLATSLS_VERSION=\"$(LATSLSVER)\"
-
-.SUFFIXES: .F
-
-.c.o:
-	$(CC) $(CPPFLAGS) $(DEFS) $(CFLAGS) -c $<
-
-.c:
-	$(CC) $(CPPFLAGS) $(DEFS) $(CFLAGS) -o $@ $< $(LDFLAGS)
-
-.F:
-	case $(OS) in \
-	     'aix') \
-		    /usr/ccs/lib/cpp -P $(CPPFLAGS) $(CFLAGS) $(INCLUDES) $(DEFS) $< $@.f; \
-		    $(FC) $(FFLAGS) $(LDFLAGS) $(LIBS) $@.f -o $@; \
-		    rm $@.f; \
-		    ;; \
-	     *) \
-		    $(FC) $(CPPFLAGS) $(INCLUDES) $(DEFS) $(FFLAGS) -o $@ $< $(LDFLAGS) $(LIBS); \
-		    ;; \
-	esac
-	
-clean:
-	-rm -f *.o core *.~*~ testnc testgrib testf testf.f
-
-distclean: clean
-	-rm -f $(LIBRARY) testnc testnc.nc testgrib testgrib.grb \
-	testgrib.ctl testgrib.bin testgrib.gmp testf.nc latsls wgrib
-
-settest:
-	cp liblats.a lib/
-	cp wgrib bin/
-	cp latsls bin/
-
diff --git a/CMake/cdat_modules_extra/libcf_install_step.cmake.in b/CMake/cdat_modules_extra/libcf_install_step.cmake.in
deleted file mode 100644
index f5c293642b1343f94fe675f34bc3751ed8ec5ea3..0000000000000000000000000000000000000000
--- a/CMake/cdat_modules_extra/libcf_install_step.cmake.in
+++ /dev/null
@@ -1,13 +0,0 @@
-
-include(@cdat_CMAKE_BINARY_DIR@/cdat_common_environment.cmake)
-
-execute_process(
-  COMMAND "@PYTHON_EXECUTABLE@" setup.py install @PYTHON_EXTRA_PREFIX@
-  WORKING_DIRECTORY @libcf_source@
-  RESULT_VARIABLE res)
-
-if(NOT ${res} EQUAL 0)
-  message("Install Errors detected: \n${CDAT_OUT}\n${CDAT_ERR}")
-  message(FATAL_ERROR "Error in Install")
-endif()
-message("Install succeeded.")
diff --git a/CMake/cdat_modules_extra/libcf_make_step.cmake.in b/CMake/cdat_modules_extra/libcf_make_step.cmake.in
deleted file mode 100644
index 790efaa636433f728eab28890eafdb678cf7602f..0000000000000000000000000000000000000000
--- a/CMake/cdat_modules_extra/libcf_make_step.cmake.in
+++ /dev/null
@@ -1,15 +0,0 @@
-
-include(@cdat_CMAKE_BINARY_DIR@/cdat_common_environment.cmake)
-
-execute_process(
-  COMMAND "@PYTHON_EXECUTABLE@" setup.py build
-  WORKING_DIRECTORY @libcf_source@
-  OUTPUT_VARIABLE CDAT_OUT
-  ERROR_VARIABLE CDAT_ERR
-  RESULT_VARIABLE res)
-
-if(NOT ${res} EQUAL 0)
-  message("Make Errors detected: \n${CDAT_OUT}\n${CDAT_ERR}")
-  message(FATAL_ERROR "Error in Make")
-endif()
-message("Make succeeded.")
diff --git a/CMake/cdat_modules_extra/libdrs_Makefile.Linux.gfortran.in b/CMake/cdat_modules_extra/libdrs_Makefile.Linux.gfortran.in
deleted file mode 100644
index 77a49c002ef0087893cf909ece243e968a3c17c4..0000000000000000000000000000000000000000
--- a/CMake/cdat_modules_extra/libdrs_Makefile.Linux.gfortran.in
+++ /dev/null
@@ -1,78 +0,0 @@
-# DRS library Makefile
-#
-# Usage:
-#
-# To make DRS library (libdrs.a) for Linux, with Absoft FORTRAN:
-#	% make
-#
-#--------------------------------------------------------------------
-
-#jfp was DEBUG = -O
-DEBUG = -g -O
-FC = gfortran
-CC = gcc
-ARCHOPT = -mtune=native
-#ARCHOPT = -arch i386
-#ARCHOPT =
-FOPTS = -fcray-pointer $(ARCHOPT) -W
-# FFLAGS = $(DEBUG) $(FOPTS) -Dsun -D__linux -D__linux_pgi -byteswapio
-FFLAGS = $(DEBUG) $(FOPTS) -Dgfortran -Dsun -D__linux -D__linux_gfortran -fpic
-# FFLAGS = $(DEBUG) $(FOPTS) -Dsun -D__linux -D__linux_pgi -Dgfortran -Dmac
-CFLAGS = $(DEBUG) $(ARCHOPT) -Dsun -D__linux -D__linux_gfortran -fpic
-INSTALL_LIB = @cdat_EXTERNALS@/lib
-INSTALL_INC = @cdat_EXTERNALS@/include
-#CPPFLAGS = -Dmac -Dsun -D__linux -D__linux_pgi $(ARCHOPT)
-CPPFLAGS = -D__linux $(ARCHOPT) -fpic
-#CPPFLAGS = -Dmac $(ARCHOPT) -Dsun -byteswapio
-CPP = cpp
-
-FOBJECTS = getdat.o idenc.o putdat.o clavdb.o getdim.o iflun.o setdim.o getnam.o mvnbc.o cluvdb.o getnd.o bnsrch.o drserr.o seterr.o getind.o compnm.o dictlk.o putvdm.o setnam.o setdat.o setvdm.o getrge.o savdb.o putdat1.o getdat1.o getvdim.o inqlun.o inqdict.o prdict.o rdtrans.o wrtrans.o setrep.o gettbuf.o getrge2.o getelemd.o setcdim.o getcdim.o getedim.o confnm.o putdic.o getpelem.o mimem.o redat.o wrdat.o cllun.o readhd.o writehd.o wrdic.o redic.o aslun.o drssync.o drsreadb.o drsautosync.o midate.o d_floor.o cddrsfwrap.o
-FINCLUDES = drsdef.h drscom.h cycle.h
-FSOURCES = $(FOBJECTS:.o=.F)
-
-COBJECTS = ctoi.o getslab.o drsc.o drstrunc.o cddrs_fc.o
-CINCLUDES = drscdf.h
-CSOURCES = $(COBJECTS:.o=.c)
-
-OBJECTS = $(FOBJECTS) $(COBJECTS)
-SOURCES = $(FSOURCES) $(CSOURCES)
-INCLUDES = $(FINCLUDES) $(CINCLUDES)
-#--------------------------------------------------------------------
-
-all: drsdef.h libdrs.a libdrs.so
-
-shared: drsdef.h libdrs.so
-
-libdrs.a: $(OBJECTS)
-	ar rv libdrs.a $?
-
-libdrs.so: $(OBJECTS)
-	$(CC) $(ARCHOPT) -lgfortran  -shared -o libdrs.so $(OBJECTS)
-
-drsdef.h: drsdef.HH
-	$(CPP) -P $(CPPFLAGS) drsdef.HH drsdef.h
-#--------------------------------------------------------------------
-
-install: libdrs.a
-	cp libdrs.a $(INSTALL_LIB); chmod 644 $(INSTALL_LIB)/libdrs.a
-	cp drsdef.h $(INSTALL_INC); chmod 644 $(INSTALL_INC)/drsdef.h
-	cp drscdf.h $(INSTALL_INC); chmod 644 $(INSTALL_INC)/drscdf.h
-#	install -f $(INSTALL_LIB) -m 644 libdrs.a
-#	install -f $(INSTALL_INC) -m 644 drsdef.h
-#	install -f $(INSTALL_INC) -m 644 drscdf.h
-
-#--------------------------------------------------------------------------
-
-# Miscellaneous junk
-
-tags:
-	etags $(SOURCES) $(INCLUDES)
-
-clean:
-	-rm -f *.o
-	-rm -f *~
-	-rm -f core
-.SUFFIXES: .F .o
-
-.F.o:
-	$(FC) $(FFLAGS) -c $<
diff --git a/CMake/cdat_modules_extra/libdrs_Makefile.Mac.fwrap.gfortran.in b/CMake/cdat_modules_extra/libdrs_Makefile.Mac.fwrap.gfortran.in
deleted file mode 100644
index 9cdb8773fe4148485a39ef269c4a2a0e80e8f4c5..0000000000000000000000000000000000000000
--- a/CMake/cdat_modules_extra/libdrs_Makefile.Mac.fwrap.gfortran.in
+++ /dev/null
@@ -1,85 +0,0 @@
-# DRS library Makefile
-#
-# Usage:
-#
-# To make DRS library (libdrs.a) for Linux, with Absoft FORTRAN:
-#	% make
-# This makefile is set up for a 64-bit Macintosh and gfortran/gcc 4.6.0
-# but see comments for how to use older Macs and older gfortran/gcc.
-#
-#--------------------------------------------------------------------
-
-# DEBUG = -O
-DEBUG = -g -O -save-temps
-FC = /usr/local/bin/gfortran
-CC = gcc
-#ARCHOPT = -arch x86_64
-#ARCHOPT = -arch i386
-ARCHOPT = -m64
-
-FOPTS = -fcray-pointer $(ARCHOPT) -W
-# FFLAGS = $(DEBUG) $(FOPTS) -Dsun -D__linux -D__linux_pgi -byteswapio
-# FFLAGS = $(DEBUG) $(FOPTS) -Dsun -D__linux -D__linux_pgi -Dgfortran -Dmac
-FFLAGS = $(DEBUG) $(FOPTS) -Dsun -Dgfortran -D__linux -D__linux_gfortran -Dmac -Dmac64
-CFLAGS = $(DEBUG) $(ARCHOPT)
-INSTALL_LIB = @cdat_EXTERNALS@/lib
-INSTALL_INC = @cdat_EXTERNALS@/include
-# Somehow CPPFLAGS ends up on the gcc lines...
-#CPPFLAGS = -Dmac -Dsun -D__linux -D__linux_pgi $(ARCHOPT)
-#CPPFLAGS = -Dmac $(ARCHOPT) -Dsun -byteswapio   note that byteswapio is never referenced
-#CPPFLAGS = -Dsun -D__linux -D__linux_gfortran -Dmac $(ARCHOPT)
-CPPFLAGS = -Dsun -D__linux -D__linux_gfortran -Dmac -Dmac64 $(ARCHOPT)
-CPP = cpp
-
-FOBJECTS = getdat.o idenc.o putdat.o clavdb.o getdim.o iflun.o setdim.o getnam.o mvnbc.o cluvdb.o getnd.o bnsrch.o drserr.o seterr.o getind.o compnm.o dictlk.o putvdm.o setnam.o setdat.o setvdm.o getrge.o savdb.o putdat1.o getdat1.o getvdim.o inqlun.o inqdict.o prdict.o rdtrans.o wrtrans.o setrep.o gettbuf.o getrge2.o getelemd.o setcdim.o getcdim.o getedim.o confnm.o putdic.o getpelem.o mimem.o redat.o wrdat.o cllun.o readhd.o writehd.o wrdic.o redic.o aslun.o drssync.o drsreadb.o drsautosync.o midate.o d_floor.o mac.o cddrsfwrap.o
-FINCLUDES = drsdef.h drscom.h cycle.h
-FSOURCES = $(FOBJECTS:.o=.F)
-
-COBJECTS = ctoi.o getslab.o drsc.o drstrunc.o macintosh.o cddrs_fc.o
-CINCLUDES = drscdf.h
-CSOURCES = $(COBJECTS:.o=.c)
-
-OBJECTS = $(FOBJECTS) $(COBJECTS)
-SOURCES = $(FSOURCES) $(CSOURCES)
-INCLUDES = $(FINCLUDES) $(CINCLUDES)
-#--------------------------------------------------------------------
-
-all: drsdef.h libdrs.a libdrs.so
-
-shared: drsdef.h libdrs.so
-
-libdrs.a: $(OBJECTS)
-	ar rv libdrs.a $?
-
-# formerly in libdrs.so, needed for Fortran->cdms link: -lcdms
-libdrs.so: $(OBJECTS)
-	$(CC) $(ARCHOPT) -headerpad_max_install_names -L/usr/X11R6/lib -L/usr/local/gfortran/lib -lgfortran  -L/usr/local/lib/ -L@cdat_EXTERNALS@/lib -lnetcdf -lgrib2c  -lquadmath -lcdms -shared -lpng15 -ljasper -o libdrs.so $(OBJECTS)
-
-drsdef.h: drsdef.HH
-	$(CPP) -P $(CPPFLAGS) drsdef.HH drsdef.h
-#--------------------------------------------------------------------
-
-install: libdrs.a
-	cp libdrs.a $(INSTALL_LIB); chmod 644 $(INSTALL_LIB)/libdrs.a
-	cp drsdef.h $(INSTALL_INC); chmod 644 $(INSTALL_INC)/drsdef.h
-	cp drscdf.h $(INSTALL_INC); chmod 644 $(INSTALL_INC)/drscdf.h
-#	install -f $(INSTALL_LIB) -m 644 libdrs.a
-#	install -f $(INSTALL_INC) -m 644 drsdef.h
-#	install -f $(INSTALL_INC) -m 644 drscdf.h
-
-#--------------------------------------------------------------------------
-
-# Miscellaneous junk
-
-tags:
-	etags $(SOURCES) $(INCLUDES)
-
-clean:
-	-rm -f *.o
-	-rm -f *~
-	-rm -f core
-
-.SUFFIXES: .F .o
-
-.F.o:
-	$(FC) $(FFLAGS) -c $<
diff --git a/CMake/cdat_modules_extra/libdrs_Makefile.Mac.gfortran.in b/CMake/cdat_modules_extra/libdrs_Makefile.Mac.gfortran.in
deleted file mode 100644
index d139f0b293f9667b6f341207449e6c09d1f0cdcc..0000000000000000000000000000000000000000
--- a/CMake/cdat_modules_extra/libdrs_Makefile.Mac.gfortran.in
+++ /dev/null
@@ -1,89 +0,0 @@
-# DRS library Makefile
-#
-# Usage:
-#
-# To make DRS library (libdrs.a) for Linux, with Absoft FORTRAN:
-#	% make
-# This makefile is set up for a 64-bit Macintosh and gfortran/gcc 4.6.0
-# but see comments for how to use older Macs and older gfortran/gcc.
-#
-#--------------------------------------------------------------------
-
-# DEBUG = -O
-DEBUG = -g -O -save-temps
-FC = /usr/local/bin/gfortran
-CC = gcc
-#ARCHOPT = -arch x86_64
-#ARCHOPT = -arch i386
-ARCHOPT = -m64
-
-FOPTS = -fcray-pointer $(ARCHOPT) -W
-# FFLAGS = $(DEBUG) $(FOPTS) -Dsun -D__linux -D__linux_pgi -byteswapio
-# FFLAGS = $(DEBUG) $(FOPTS) -Dsun -D__linux -D__linux_pgi -Dgfortran -Dmac
-FFLAGS = $(DEBUG) $(FOPTS) -Dsun -Dgfortran -D__linux -D__linux_gfortran -Dmac -Dmac64
-CFLAGS = $(DEBUG) $(ARCHOPT)
-INSTALL_LIB = @cdat_EXTERNALS@/lib
-INSTALL_INC = @cdat_EXTERNALS@/include
-# Somehow CPPFLAGS ends up on the gcc lines...
-#CPPFLAGS = -Dmac -Dsun -D__linux -D__linux_pgi $(ARCHOPT)
-#CPPFLAGS = -Dmac $(ARCHOPT) -Dsun -byteswapio   note that byteswapio is never referenced
-#CPPFLAGS = -Dsun -D__linux -D__linux_gfortran -Dmac $(ARCHOPT)
-CPPFLAGS = -Dsun -D__linux -D__linux_gfortran -Dmac -Dmac64 $(ARCHOPT)
-CPP = cpp
-
-FOBJECTS = getdat.o idenc.o putdat.o clavdb.o getdim.o iflun.o setdim.o getnam.o mvnbc.o cluvdb.o getnd.o bnsrch.o drserr.o seterr.o getind.o compnm.o dictlk.o putvdm.o setnam.o setdat.o setvdm.o getrge.o savdb.o putdat1.o getdat1.o getvdim.o inqlun.o inqdict.o prdict.o rdtrans.o wrtrans.o setrep.o gettbuf.o getrge2.o getelemd.o setcdim.o getcdim.o getedim.o confnm.o putdic.o getpelem.o mimem.o redat.o wrdat.o cllun.o readhd.o writehd.o wrdic.o redic.o aslun.o drssync.o drsreadb.o drsautosync.o midate.o d_floor.o mac.o
-# formerly in FOBJECTS, needed for Fortran->cdms link:  cddrsfwrap.o
-# .. cddrsfwrap.o is a Fortran wrapper for libcdms; not really part of libdrs.
-FINCLUDES = drsdef.h drscom.h cycle.h
-FSOURCES = $(FOBJECTS:.o=.F)
-
-COBJECTS = ctoi.o getslab.o drsc.o drstrunc.o macintosh.o
-# formerly in COBJECTS, needed for Fortran->cdms link: cddrs_fc.o
-# ... cddrs_fc.o is C code to support the Fortran wrapper for libcdms; not really part of libdrs.
-CINCLUDES = drscdf.h
-CSOURCES = $(COBJECTS:.o=.c)
-
-OBJECTS = $(FOBJECTS) $(COBJECTS)
-SOURCES = $(FSOURCES) $(CSOURCES)
-INCLUDES = $(FINCLUDES) $(CINCLUDES)
-#--------------------------------------------------------------------
-
-all: drsdef.h libdrs.a libdrs.so
-
-shared: drsdef.h libdrs.so
-
-libdrs.a: $(OBJECTS)
-	ar rv libdrs.a $?
-
-# formerly in libdrs.so, needed for Fortran->cdms link: -lcdms
-libdrs.so: $(OBJECTS)
-	$(CC) $(ARCHOPT) -headerpad_max_install_names -L/usr/local/gfortran/lib -lgfortran  -L/usr/local/lib/ -L@cdat_EXTERNALS@/lib -lnetcdf -lgrib2c  -lquadmath -shared -o libdrs.so $(OBJECTS)
-
-drsdef.h: drsdef.HH
-	$(CPP) -P $(CPPFLAGS) drsdef.HH drsdef.h
-#--------------------------------------------------------------------
-
-install: libdrs.a
-	cp libdrs.a $(INSTALL_LIB); chmod 644 $(INSTALL_LIB)/libdrs.a
-	cp drsdef.h $(INSTALL_INC); chmod 644 $(INSTALL_INC)/drsdef.h
-	cp drscdf.h $(INSTALL_INC); chmod 644 $(INSTALL_INC)/drscdf.h
-#	install -f $(INSTALL_LIB) -m 644 libdrs.a
-#	install -f $(INSTALL_INC) -m 644 drsdef.h
-#	install -f $(INSTALL_INC) -m 644 drscdf.h
-
-#--------------------------------------------------------------------------
-
-# Miscellaneous junk
-
-tags:
-	etags $(SOURCES) $(INCLUDES)
-
-clean:
-	-rm -f *.o
-	-rm -f *~
-	-rm -f core
-
-.SUFFIXES: .F .o
-
-.F.o:
-	$(FC) $(FFLAGS) -c $<
diff --git a/CMake/cdat_modules_extra/lxml_build_step.cmake.in b/CMake/cdat_modules_extra/lxml_build_step.cmake.in
deleted file mode 100644
index dca0940b94ed9e9e72ce3c7926798d70b7eb6645..0000000000000000000000000000000000000000
--- a/CMake/cdat_modules_extra/lxml_build_step.cmake.in
+++ /dev/null
@@ -1,19 +0,0 @@
-include(@cdat_CMAKE_BINARY_DIR@/cdat_common_environment.cmake)
-
-set(ENV{CFLAGS} "-I@cdat_EXTERNALS@/include/libxml2 $ENV{CFLAGS}")
-set(ENV{CXXFLAGS} "-I@cdat_EXTERNALS@/include/libxml2 $ENV{CXXFLAGS}")
-set(ENV{CPPFLAGS} "-I@cdat_EXTERNALS@/include/libxml2 $ENV{CPPFLAGS}")
-
-execute_process(
-  COMMAND "@PYTHON_EXECUTABLE@" setup.py build
-  WORKING_DIRECTORY "@LXML_SOURCE_DIR@"
-  RESULT_VARIABLE res
-  OUTPUT_VARIABLE LXML_OUT
-  ERROR_VARIABLE LXML_ERR)
-
-if(NOT ${res} EQUAL 0)
-  message("LXML Errors detected: \n${LXML_OUT}\n${LXML_ERR}")
-  message(FATAL_ERROR "Error in build of LXML")
-endif()
-message("lxml build worked.")
-
diff --git a/CMake/cdat_modules_extra/lxml_install_step.cmake.in b/CMake/cdat_modules_extra/lxml_install_step.cmake.in
deleted file mode 100644
index 21651e44eb62487772cd1089a60baf3304e09929..0000000000000000000000000000000000000000
--- a/CMake/cdat_modules_extra/lxml_install_step.cmake.in
+++ /dev/null
@@ -1,14 +0,0 @@
-message("Installing LXML:\n@LXML_PREFIX_ARGS@")
-
-execute_process(
-  COMMAND "@PYTHON_EXECUTABLE@" setup.py install @PYTHON_EXTRA_PREFIX@
-  WORKING_DIRECTORY "@LXML_BINARY_DIR@"
-  RESULT_VARIABLE res
-)
-
-if(NOT ${res} EQUAL 0)
-  message("LXML Errors detected: \n${LXML_OUT}\n${LXML_ERR}")
-  message(FATAL_ERROR "Error in install of LXML")
-endif()
-message("lxml install succeeded.")
-
diff --git a/CMake/cdat_modules_extra/matplotlib_build_step.cmake.in b/CMake/cdat_modules_extra/matplotlib_build_step.cmake.in
deleted file mode 100644
index bb0102cf5cbb9cb6339a8faadd426cfbe05f4602..0000000000000000000000000000000000000000
--- a/CMake/cdat_modules_extra/matplotlib_build_step.cmake.in
+++ /dev/null
@@ -1,6 +0,0 @@
-include(@cdat_CMAKE_BINARY_DIR@/cdat_common_environment.cmake)
-
-execute_process(
-  COMMAND "@PYTHON_EXECUTABLE@" setup.py install @PYTHON_EXTRA_PREFIX@
-  WORKING_DIRECTORY "@matplotlib_source_dir@"
-)
diff --git a/CMake/cdat_modules_extra/matplotlib_patch_step.cmake.in b/CMake/cdat_modules_extra/matplotlib_patch_step.cmake.in
deleted file mode 100644
index 6c28091a3a4b9d9e475c7b4af9ea5b4831712b19..0000000000000000000000000000000000000000
--- a/CMake/cdat_modules_extra/matplotlib_patch_step.cmake.in
+++ /dev/null
@@ -1,9 +0,0 @@
-set(INSTALL_DIR "@cdat_EXTERNALS@")
-
-configure_file(
-  "@cdat_CMAKE_SOURCE_DIR@/cdat_modules_extra/matplotlib_setup_cfg.in"
-  "@matplotlib_source_dir@/setup.cfg"
-  @ONLY
-)
-
-set(ENV{LD_LIBRARY_PATH} "${INSTALL_DIR}/lib;$ENV{LD_LIBRARY_PATH}")
diff --git a/CMake/cdat_modules_extra/matplotlib_setup_cfg.in b/CMake/cdat_modules_extra/matplotlib_setup_cfg.in
deleted file mode 100644
index 5dc914fe20a4f113601eb1b7a735c03ca834d01b..0000000000000000000000000000000000000000
--- a/CMake/cdat_modules_extra/matplotlib_setup_cfg.in
+++ /dev/null
@@ -1,76 +0,0 @@
-# Rename this file to setup.cfg to modify matplotlib's
-# build options.
-
-[egg_info]
-
-[directories]
-# Uncomment to override the default basedir in setupext.py.
-# This can be a single directory or a space-delimited list of directories.
-# basedirlist = @INSTALL_DIR@
-
-[status]
-# To suppress display of the dependencies and their versions
-# at the top of the build log, uncomment the following line:
-#suppress = True
-#
-# Uncomment to insert lots of diagnostic prints in extension code
-#verbose = True
-
-[provide_packages]
-# By default, matplotlib checks for a few dependencies and
-# installs them if missing. This feature can be turned off
-# by uncommenting the following lines. Acceptable values are:
-#     True: install, overwrite an existing installation
-#     False: do not install
-#     auto: install only if the package is unavailable. This
-#           is the default behavior
-#
-## Date/timezone support:
-#pytz = False
-#dateutil = False
-
-[gui_support]
-# Matplotlib supports multiple GUI toolkits, including Cocoa,
-# GTK, Fltk, MacOSX, Qt, Qt4, Tk, and WX. Support for many of
-# these toolkits requires AGG, the Anti-Grain Geometry library,
-# which is provided by matplotlib and built by default.
-#
-# Some backends are written in pure Python, and others require
-# extension code to be compiled. By default, matplotlib checks
-# for these GUI toolkits during installation and, if present,
-# compiles the required extensions to support the toolkit. GTK
-# support requires the GTK runtime environment and PyGTK. Wx
-# support requires wxWidgets and wxPython. Tk support requires
-# Tk and Tkinter. The other GUI toolkits do not require any
-# extension code, and can be used as long as the libraries are
-# installed on your system.
-#
-# You can uncomment any of the following lines if you know you do
-# not want to use the GUI toolkit. Acceptable values are:
-#     True: build the extension. Exits with a warning if the
-#           required dependencies are not available
-#     False: do not build the extension
-#     auto: build if the required dependencies are available,
-#           otherwise skip silently. This is the default
-#           behavior
-#
-gtk = False
-gtkagg = False
-tkagg = False
-macosx = False
-qt5agg = False
-
-[rc_options]
-# User-configurable options
-#
-# Default backend, one of: Agg, Cairo, CocoaAgg, GTK, GTKAgg, GTKCairo,
-# FltkAgg, MacOSX, Pdf, Ps, QtAgg, Qt4Agg, SVG, TkAgg, WX, WXAgg.
-#
-# The Agg, Ps, Pdf and SVG backends do not require external
-# dependencies. Do not choose GTK, GTKAgg, GTKCairo, MacOSX, TkAgg or WXAgg
-# if you have disabled the relevant extension modules.  Agg will be used
-# by default.
-#
-backend = @MATPLOTLIB_BACKEND@
-backend.qt4 = PyQt4
-#
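
With every GUI toolkit switched off above, a build driven by this setup.cfg supports only the non-interactive backends (Agg, Pdf, Ps, SVG), with the default pinned to @MATPLOTLIB_BACKEND@. A quick runtime check, assuming the Agg backend was the one configured:

    import matplotlib
    # Pick the backend before pyplot is imported; with the toolkits
    # disabled in setup.cfg, Agg is the safe non-interactive choice.
    matplotlib.use("Agg")
    import matplotlib.pyplot as plt

    fig = plt.figure()
    plt.plot([0, 1], [0, 1])
    fig.savefig("check.png")  # renders without a display
    print matplotlib.get_backend()
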
diff --git a/CMake/cdat_modules_extra/mpi4py_install_step.cmake.in b/CMake/cdat_modules_extra/mpi4py_install_step.cmake.in
deleted file mode 100644
index 582bbbf9e8ac8b0bb0170f5af76231c2e600e584..0000000000000000000000000000000000000000
--- a/CMake/cdat_modules_extra/mpi4py_install_step.cmake.in
+++ /dev/null
@@ -1,21 +0,0 @@
-message("Installing mpi4py:\n@mpi4py_PREFIX_ARGS@")
-set(ENV{@LIBRARY_PATH@} "@CMAKE_INSTALL_PREFIX@/lib:@cdat_EXTERNALS@/lib:$ENV{@LIBRARY_PATH@}")
-set(ENV{VS_UNICODE_OUTPUT} "")
-
-if(APPLE)
-    set(ENV{CFLAGS} "@cdat_osx_arch_flag@ @cdat_osx_version_flag@ @cdat_osx_sysroot@")
-endif()
-
-execute_process(
-  COMMAND env  @LIBRARY_PATH@="$ENV{LD_LIBRARY_PATH}" "@PYTHON_EXECUTABLE@" setup.py install @PYTHON_EXTRA_PREFIX@
-  WORKING_DIRECTORY "@mpi4py_binary@"
-  RESULT_VARIABLE res
-  OUTPUT_VARIABLE mpi4py_OUT
-  ERROR_VARIABLE mpi4py_ERR
-)
-
-if(NOT ${res} EQUAL 0)
-  message("mpi4py Errors detected: \n${mpi4py_OUT}\n${mpi4py_ERR}")
-  message(FATAL_ERROR "Error in install of mpi4py")
-endif()
-message("Mpi4py install succeeded.")
diff --git a/CMake/cdat_modules_extra/mpi4py_make_step.cmake.in b/CMake/cdat_modules_extra/mpi4py_make_step.cmake.in
deleted file mode 100644
index 96f160201c279292def90fb7aff140b748fb8030..0000000000000000000000000000000000000000
--- a/CMake/cdat_modules_extra/mpi4py_make_step.cmake.in
+++ /dev/null
@@ -1,20 +0,0 @@
-message("Building Mpi4py:\n@mpi4py_binary@")
-include(@cdat_CMAKE_BINARY_DIR@/cdat_common_environment.cmake)
-set(ENV{@LIBRARY_PATH@} "@CMAKE_INSTALL_PREFIX@/lib:@cdat_EXTERNALS@/lib:$ENV{@LIBRARY_PATH@}")
-set(ENV{VS_UNICODE_OUTPUT} "")
-if(APPLE)
-    set(ENV{CFLAGS} "@cdat_osx_arch_flag@ @cdat_osx_version_flag@ @cdat_osx_sysroot@")
-endif()
-
-execute_process(
-  COMMAND env  @LIBRARY_PATH@="$ENV{LD_LIBRARY_PATH}" "@PYTHON_EXECUTABLE@" setup.py build
-  WORKING_DIRECTORY "@mpi4py_binary@"
-  RESULT_VARIABLE res
-  OUTPUT_VARIABLE mpi4py_OUT
-  ERROR_VARIABLE mpi4py_ERR)
-
-if(NOT ${res} EQUAL 0)
-  message("Mpi4py Errors detected: \n${mpi4py_OUT}\n${mpi4py_ERR}")
-  message(FATAL_ERROR "Error in build of mpi4py")
-endif()
-message("mpi4py build worked.")
diff --git a/CMake/cdat_modules_extra/netcdf_patch_step.cmake.in b/CMake/cdat_modules_extra/netcdf_patch_step.cmake.in
deleted file mode 100644
index e16a54148d8c74d784f0a700665b930a43c07df3..0000000000000000000000000000000000000000
--- a/CMake/cdat_modules_extra/netcdf_patch_step.cmake.in
+++ /dev/null
@@ -1,6 +0,0 @@
-  execute_process(
-    WORKING_DIRECTORY @netcdf_source@
-    COMMAND patch -p1
-    INPUT_FILE @cdat_CMAKE_SOURCE_DIR@/netcdf_clang.patch
-  )
-
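The patch step simply replays a patch file onto the unpacked source tree with patch -p1, feeding the file on stdin. A rough Python equivalent, with made-up paths:

    import subprocess

    def apply_patch(source_dir, patch_file):
        # Same call as the execute_process() above: patch -p1 run in
        # the source tree, with the patch supplied on stdin.
        with open(patch_file) as f:
            subprocess.check_call(["patch", "-p1"], cwd=source_dir, stdin=f)

    apply_patch("/tmp/netcdf-src", "/tmp/netcdf_clang.patch")
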
diff --git a/CMake/cdat_modules_extra/paraview_download.sh.in b/CMake/cdat_modules_extra/paraview_download.sh.in
deleted file mode 100755
index dee9d7f795ee70f1098ea6bfb3e503efe2302bdc..0000000000000000000000000000000000000000
--- a/CMake/cdat_modules_extra/paraview_download.sh.in
+++ /dev/null
@@ -1,19 +0,0 @@
-#!/bin/sh
-
-cd @CMAKE_CURRENT_BINARY_DIR@/build/
-
-"@GIT_EXECUTABLE@" clone @PARAVIEW_SOURCE@
-cd ParaView
-"@GIT_EXECUTABLE@" checkout @paraview_branch@
-"@GIT_EXECUTABLE@" submodule init
-
-SUBMODULES=`git submodule status | sed 's/.* //' | sed ':a;N;$!ba;s/\n/ /g'`
-
-for SUBMODULE in $SUBMODULES
-do 
-  tmp=`git config  --get submodule.$SUBMODULE.url`
-  tmp=`echo $tmp | sed 's/@REPLACE_GIT_PROTOCOL_PREFIX@/@GIT_PROTOCOL_PREFIX@/g'`
-  git config "submodule.$SUBMODULE.url" $tmp
-done
-
-"@GIT_EXECUTABLE@" submodule update --recursive
diff --git a/CMake/cdat_modules_extra/paraview_install_python_module.cmake.in b/CMake/cdat_modules_extra/paraview_install_python_module.cmake.in
deleted file mode 100644
index aafa3a97151936025afe41aec9187502ca4e58f2..0000000000000000000000000000000000000000
--- a/CMake/cdat_modules_extra/paraview_install_python_module.cmake.in
+++ /dev/null
@@ -1,25 +0,0 @@
-
-set(ENV{CC} "@CMAKE_C_COMPILER@")
-set(ENV{CXX} "@CMAKE_CXX_COMPILER@")
-set(ENV{CPP} "@CMAKE_CXX_COMPILER@")
-
-set(ENV{@LIBRARY_PATH@} "@cdat_EXTERNALS@/lib:$ENV{@LIBRARY_PATH@}")
-set(ENV{LDFLAGS} "-L@cdat_EXTERNALS@/lib @cdat_external_link_directories@ @cdat_rpath_flag@@CMAKE_INSTALL_PREFIX@/lib @cdat_rpath_flag@@cdat_EXTERNALS@/lib")
-set(ENV{CFLAGS} "-I@cdat_EXTERNALS@/include @cdat_osx_flags@ @cdat_external_include_directories@")
-set(ENV{CPPFLAGS} "-I@cdat_EXTERNALS@/include @cdat_osx_cppflags@ @cdat_external_include_directories@")
-set(ENV{CXXFLAGS} "-I@cdat_EXTERNALS@/include @cdat_osx_cxxflags@")
-
-set(ENV{EXTERNALS} "@cdat_EXTERNALS@")
-
-execute_process(
-  COMMAND env PYTHONPATH=@PYTHONPATH@ "@PYTHON_EXECUTABLE@" setup.py install @PYTHON_EXTRA_PREFIX@
-  WORKING_DIRECTORY @ParaView_binary@/Utilities/VTKPythonWrapping
-  RESULT_VARIABLE res)
-
-if(NOT ${res} EQUAL 0)
-  message("Make Errors detected: \n${CDAT_OUT}\n${CDAT_ERR}")
-  message(FATAL_ERROR "Error in VTK Python Install")
-endif()
-
-message("Install succeeded.")
-
diff --git a/CMake/cdat_modules_extra/pbmplus_configure_step.cmake.in b/CMake/cdat_modules_extra/pbmplus_configure_step.cmake.in
deleted file mode 100644
index 09cbc2ad85952fff1d8899292d3705a7ab22026e..0000000000000000000000000000000000000000
--- a/CMake/cdat_modules_extra/pbmplus_configure_step.cmake.in
+++ /dev/null
@@ -1,9 +0,0 @@
-
-set(EXTERNALS @pbmplus_install@)
-configure_file(@cdat_external_patch_dir@/src/pbmplus/Makefile.in
-  @pbmplus_source@/Makefile
-  @ONLY)
-
-configure_file(@cdat_external_patch_dir@/src/pbmplus/pnm/Makefile.in
-  @pbmplus_source@/pnm/Makefile
-  @ONLY)
diff --git a/CMake/cdat_modules_extra/pmw_install_step.cmake.in b/CMake/cdat_modules_extra/pmw_install_step.cmake.in
deleted file mode 100644
index 769aa7454fb4a9c2822d5a44a2f75bd78c5e0648..0000000000000000000000000000000000000000
--- a/CMake/cdat_modules_extra/pmw_install_step.cmake.in
+++ /dev/null
@@ -1,13 +0,0 @@
-
-include(@cdat_CMAKE_BINARY_DIR@/cdat_common_environment.cmake)
-
-execute_process(
-  COMMAND "@PYTHON_EXECUTABLE@" setup.py install @PYTHON_EXTRA_PREFIX@
-  WORKING_DIRECTORY @Pmw_source@/src
-  RESULT_VARIABLE res)
-
-if(NOT ${res} EQUAL 0)
-  message("Install Errors detected: \n${CDAT_OUT}\n${CDAT_ERR}")
-  message(FATAL_ERROR "Error in Install")
-endif()
-message("Install succeeded.")
diff --git a/CMake/cdat_modules_extra/pmw_make_step.cmake.in b/CMake/cdat_modules_extra/pmw_make_step.cmake.in
deleted file mode 100644
index a1d3f9759c6011ff042df8355aaaf1ca2fbb0474..0000000000000000000000000000000000000000
--- a/CMake/cdat_modules_extra/pmw_make_step.cmake.in
+++ /dev/null
@@ -1,15 +0,0 @@
-
-include(@cdat_CMAKE_BINARY_DIR@/cdat_common_environment.cmake)
-
-execute_process(
-  COMMAND "@PYTHON_EXECUTABLE@" setup.py build
-  WORKING_DIRECTORY @Pmw_source@/src
-  OUTPUT_VARIABLE CDAT_OUT
-  ERROR_VARIABLE CDAT_ERR
-  RESULT_VARIABLE res)
-
-if(NOT ${res} EQUAL 0)
-  message("Make Errors detected: \n${CDAT_OUT}\n${CDAT_ERR}")
-  message(FATAL_ERROR "Error in Make")
-endif()
-message("Make succeeded.")
diff --git a/CMake/cdat_modules_extra/predownload.py.in b/CMake/cdat_modules_extra/predownload.py.in
deleted file mode 100755
index 3a3af910464ad30b7cc417fd5c632c4ff7c101ba..0000000000000000000000000000000000000000
--- a/CMake/cdat_modules_extra/predownload.py.in
+++ /dev/null
@@ -1,88 +0,0 @@
-#!/usr/bin/python
-
-import shlex
-import subprocess
-import urllib2
-import os
-
-fileName = "@PARTS_BUILT_INFO@"
-fetched_data = "@cdat_BINARY_DIR@/fetched_for_offline"
-try:
-    os.makedirs(fetched_data)
-except:
-    pass
-try:
-    os.makedirs(os.path.join(fetched_data,"contrib"))
-except:
-    pass
-
-
-def fetch(url,md5=None):
-    try:
-        import hashlib
-        HAS_HASHLIB=True
-    except:
-        HAS_HASHLIB=False
-    if md5 is None:
-        HAS_HASHLIB=False
-
-    print "Fetching: ",url
-    if HAS_HASHLIB:
-        print "Will control md5"
-    u = urllib2.urlopen(url)
-    nm = os.path.join(fetched_data,url.split("/")[-1])
-    f = open(nm, 'wb')
-    sz = 65536
-    if HAS_HASHLIB:
-        hsh = hashlib.md5()
-    buf = u.read(sz)
-    while len(buf)>0:
-        f.write(buf)
-        if HAS_HASHLIB:
-            hsh.update(buf)
-        buf=u.read(sz)
-    f.close()
-    if HAS_HASHLIB and hsh.hexdigest()!=md5:
-        raise Exception,"Error downloading file: %s, md5 does not match" % nm
-
-def processFile(name):
-    f=open(name)
-    for ln in f.xreadlines():
-        sp = ln.split()
-        nm = sp[0]
-        ver = sp[1]
-        try:
-            url = sp[2]
-        except:
-            url = None
-        try:
-            md5 = sp[3]
-        except:
-            md5 = None
-        try:
-            url2 = sp[4]
-        except:
-            url2 = None
-        try:
-            md5b = sp[5]
-        except:
-            md5b = None
-        if url=="N/A":
-            continue
-        elif url.find("git://")>-1 or url.strip()[-4:]==".git":
-            if md5 is None:
-                md5 = "master"
-            nm  = url.split("/")[-1][:-4]
-            cmd = "git clone --depth 1 -b %s %s %s/%s " % (md5,url,fetched_data,nm)
-            subprocess.Popen(shlex.split(cmd))
-        elif url is not None:
-            fetch(url,md5)
-        if url2 is not None:
-            fetch(url2,md5b)
-    ## Ok now does the git submodules
-    for c in ["eof2","windfield","sciMake","windspharm","eofs"]:
-        cmd = "cp -rf @cdat_SOURCE_DIR@/contrib/%s %s/contrib" % (c,fetched_data)
-        subprocess.Popen(shlex.split(cmd))
-if __name__ == "__main__":
-    processFile(fileName)
-
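processFile() expects one whitespace-separated record per line of @PARTS_BUILT_INFO@: package name, version, then an optional URL, md5, mirror URL, and mirror md5. "N/A" URLs are skipped, git URLs are shallow-cloned at the branch stored in the md5 column, and everything else is fetched with an md5 check when hashlib is available. A hypothetical record (invented name, URL, and checksum) and the fetch it would trigger:

    # One line of @PARTS_BUILT_INFO@ (name version url md5):
    #   cdms2 2.4 http://example.invalid/cdms2-2.4.tar.gz 0123456789abcdef0123456789abcdef
    fetch("http://example.invalid/cdms2-2.4.tar.gz",
          md5="0123456789abcdef0123456789abcdef")
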
diff --git a/CMake/cdat_modules_extra/preofflinebuild.sh.in b/CMake/cdat_modules_extra/preofflinebuild.sh.in
deleted file mode 100755
index b42dacfdec8dd6957d1953304a56d9547dcbb0c9..0000000000000000000000000000000000000000
--- a/CMake/cdat_modules_extra/preofflinebuild.sh.in
+++ /dev/null
@@ -1,11 +0,0 @@
-#!/bin/sh
-
-fetched_data="fetched_for_offline"
-cp ${fetched_data}/*gz ${fetched_data}/*bz2 ${fetched_data}/*zip @cdat_BINARY_DIR@
-cp -r ${fetched_data}/contrib @cdat_SOURCE_DIR@
-cp -rf ${fetched_data}/vistrails @CMAKE_INSTALL_PREFIX@
-cp -rf ${fetched_data}/paraview-*/* @cdat_BINARY_DIR@/build/ParaView
-tar -xf @cdat_BINARY_DIR@/visit*.gz -C @cdat_BINARY_DIR@
-rm -rf @cdat_BINARY_DIR@/build/VisIt
-mv @cdat_BINARY_DIR@/src @cdat_BINARY_DIR@/build/VisIt
-
diff --git a/CMake/cdat_modules_extra/pyopengl_install_step.cmake.in b/CMake/cdat_modules_extra/pyopengl_install_step.cmake.in
deleted file mode 100644
index 910bef7e26799e67d68fbd8cfd5855dd4f9c4bdb..0000000000000000000000000000000000000000
--- a/CMake/cdat_modules_extra/pyopengl_install_step.cmake.in
+++ /dev/null
@@ -1,13 +0,0 @@
-
-include(@cdat_CMAKE_BINARY_DIR@/cdat_common_environment.cmake)
-
-execute_process(
-  COMMAND "@PYTHON_EXECUTABLE@" setup.py install @PYTHON_EXTRA_PREFIX@
-  WORKING_DIRECTORY @PyOpenGL_source@
-  RESULT_VARIABLE res)
-
-if(NOT ${res} EQUAL 0)
-  message("Install Errors detected: \n${CDAT_OUT}\n${CDAT_ERR}")
-  message(FATAL_ERROR "Error in install")
-endif()
-message("Install succeeded.")
diff --git a/CMake/cdat_modules_extra/pyopengl_make_step.cmake.in b/CMake/cdat_modules_extra/pyopengl_make_step.cmake.in
deleted file mode 100644
index 41fe74e84038939594429f5682ec1cf30e1ad676..0000000000000000000000000000000000000000
--- a/CMake/cdat_modules_extra/pyopengl_make_step.cmake.in
+++ /dev/null
@@ -1,13 +0,0 @@
-
-include(@cdat_CMAKE_BINARY_DIR@/cdat_common_environment.cmake)
-
-execute_process(
-  COMMAND "@PYTHON_EXECUTABLE@" setup.py build
-  WORKING_DIRECTORY @PyOpenGL_source@
-  RESULT_VARIABLE res)
-
-if(NOT ${res} EQUAL 0)
-  message("Build Errors detected: \n${CDAT_OUT}\n${CDAT_ERR}")
-  message(FATAL_ERROR "Error in build")
-endif()
-message("Build succeeded.")
diff --git a/CMake/cdat_modules_extra/pyspharm_patch_step.cmake.in b/CMake/cdat_modules_extra/pyspharm_patch_step.cmake.in
deleted file mode 100644
index 54bf52d5fcc61b37661dc1265d583c06f678027f..0000000000000000000000000000000000000000
--- a/CMake/cdat_modules_extra/pyspharm_patch_step.cmake.in
+++ /dev/null
@@ -1,6 +0,0 @@
-  execute_process(
-    WORKING_DIRECTORY @pyspharm_source@
-    COMMAND patch 
-    INPUT_FILE @cdat_CMAKE_SOURCE_DIR@/pyspharm_setup.patch
-  )
-
diff --git a/CMake/cdat_modules_extra/python_configure_step.cmake.in b/CMake/cdat_modules_extra/python_configure_step.cmake.in
deleted file mode 100644
index 27f653275164ca8bab9e91fe36191634bce3dc3b..0000000000000000000000000000000000000000
--- a/CMake/cdat_modules_extra/python_configure_step.cmake.in
+++ /dev/null
@@ -1,42 +0,0 @@
-CMAKE_POLICY(SET CMP0012 NEW)
-
-set(ENV{CC} "@CMAKE_C_COMPILER@")
-set(ENV{CXX} "@CMAKE_CXX_COMPILER@")
-set(ENV{CPP} "@CMAKE_CXX_COMPILER@")
-
-set(ENV{PATH} "@cdat_EXTERNALS@/bin:$ENV{PATH}")
-set(ENV{@LIBRARY_PATH@} "@cdat_EXTERNALS@/lib:$ENV{@LIBRARY_PATH@}")
-set(ENV{LDFLAGS} "-L@cdat_EXTERNALS@/lib @cdat_external_link_directories@  @cdat_rpath_flag@@CMAKE_INSTALL_PREFIX@/lib @cdat_rpath_flag@@cdat_EXTERNALS@/lib @cdat_osx_ld_flags@ $ENV{LDFLAGS}")
-set(ENV{CFLAGS} "-I@cdat_EXTERNALS@/include -L@cdat_EXTERNALS@/lib @cdat_osx_flags@ @cdat_external_include_directories@  ${ADDITIONAL_CFLAGS} $ENV{CFLAGS}")
-set(ENV{CPPFLAGS} "-I@cdat_EXTERNALS@/include -L@cdat_EXTERNALS@/lib @cdat_osx_cppflags@ @cdat_external_include_directories@ ${ADDITIONAL_CPPFLAGS} $ENV{CPPFLAGS}")
-set(ENV{CXXFLAGS} "-I@cdat_EXTERNALS@/include -L@cdat_EXTERNALS@/lib @cdat_osx_cxxflags@ @cdat_external_include_directories@ ${ADDITIONAL_CXXFLAGS} $ENV{CXXFLAGS}")
-set(ENV{PKG_CONFIG_PATH} "@cdat_EXTERNALS@/lib/pkgconfig")
-set(ENV{PKG_CONFIG} "@cdat_PKG_CONFIG_EXECUTABLE@")
-set(ENV{FC} "")
-set(ENV{FCFLAGS} "")
-set(ENV{FCLIBS} "")
-set(ENV{F77} "")
-set(ENV{FFLAGS} "")
-set(ENV{FLIBS} "")
-set(ENV{LD_X11} "") # for xgks
-set(ENV{OPT} "")
-
-set(ENV{EXTERNALS} "@cdat_EXTERNALS@")
-
-if(APPLE)
-  set(ENV{AQUA_CDAT} "no")
-  set(ENV{MACOSX_DEPLOYMENT_TARGET} "@CMAKE_OSX_DEPLOYMENT_TARGET@")
-  unset(ENV{MAKEFLAGS})
-  if(@_CURRENT_OSX_SDK_VERSION@ VERSION_LESS "10.11")
-    set(library_param --prefix=@CMAKE_INSTALL_PREFIX@ --with-system-expat --enable-framework=@CMAKE_INSTALL_PREFIX@/Library/Frameworks)
-  else()
-    set(library_param --prefix=@CMAKE_INSTALL_PREFIX@  --enable-framework=@CMAKE_INSTALL_PREFIX@/Library/Frameworks)
-  endif()
-elseif(UNIX)
-  set(library_param --prefix=@CMAKE_INSTALL_PREFIX@ --enable-shared --enable-unicode=ucs4)
-endif()
-
-EXECUTE_PROCESS(
-  COMMAND sh configure ${library_param}
-  WORKING_DIRECTORY "@python_SOURCE_DIR@"
-  )
diff --git a/CMake/cdat_modules_extra/python_install_step.cmake.in b/CMake/cdat_modules_extra/python_install_step.cmake.in
deleted file mode 100644
index 74a63d18155f44088a77cbfc17f8bf9dd3f88fd9..0000000000000000000000000000000000000000
--- a/CMake/cdat_modules_extra/python_install_step.cmake.in
+++ /dev/null
@@ -1,51 +0,0 @@
-
-set(ENV{CC} "@CMAKE_C_COMPILER@")
-set(ENV{CXX} "@CMAKE_CXX_COMPILER@")
-set(ENV{CPP} "@CMAKE_CXX_COMPILER@")
-
-# During install, for whatever reason, python will fail if these are set.
-
-set(ENV{PATH} "@cdat_EXTERNALS@/bin:$ENV{PATH}")
-if(NOT APPLE)
-  set(ENV{@LIBRARY_PATH@} "@cdat_EXTERNALS@/lib:$ENV{@LIBRARY_PATH@}")
-endif()
-set(ENV{LDFLAGS} "-L@cdat_EXTERNALS@/lib @cdat_external_link_directories@  @cdat_rpath_flag@@CMAKE_INSTALL_PREFIX@/lib @cdat_rpath_flag@@cdat_EXTERNALS@/lib  @cdat_osx_ld_flags@")
-set(ENV{CFLAGS} "-I@cdat_EXTERNALS@/include -L@cdat_EXTERNALS@/lib @cdat_osx_flags@ @cdat_external_include_directories@  ${ADDITIONAL_CFLAGS}")
-set(ENV{CPPFLAGS} "-I@cdat_EXTERNALS@/include -L@cdat_EXTERNALS@/lib @cdat_osx_cppflags@ @cdat_external_include_directories@ ${ADDITIONAL_CPPFLAGS}")
-set(ENV{CXXFLAGS} "-I@cdat_EXTERNALS@/include -L@cdat_EXTERNALS@/lib @cdat_osx_cxxflags@ @cdat_external_include_directories@ ${ADDITIONAL_CXXFLAGS}")
-set(ENV{PKG_CONFIG_PATH} "@cdat_EXTERNALS@/lib/pkgconfig")
-set(ENV{PKG_CONFIG} "@cdat_PKG_CONFIG_EXECUTABLE@")
-set(ENV{FC} "")
-set(ENV{FCFLAGS} "")
-set(ENV{FCLIBS} "")
-set(ENV{F77} "")
-set(ENV{FFLAGS} "")
-set(ENV{FLIBS} "")
-set(ENV{LD_X11} "") # for xgks
-set(ENV{OPT} "")
-
-set(ENV{EXTERNALS} "@cdat_EXTERNALS@")
-
-if(APPLE)
-  set(ENV{AQUA_CDAT} "no")
-  set(ENV{MACOSX_DEPLOYMENT_TARGET} "@CMAKE_OSX_DEPLOYMENT_TARGET@")
-  unset(ENV{MAKEFLAGS})
- 
-  EXECUTE_PROCESS(
-    COMMAND make frameworkinstallunixtools
-    WORKING_DIRECTORY "@python_SOURCE_DIR@"
-  )
-
-  EXECUTE_PROCESS(
-    COMMAND make frameworkinstall
-    WORKING_DIRECTORY "@python_SOURCE_DIR@"
-  )
-
-else()
-
-  EXECUTE_PROCESS(
-    COMMAND make install
-    WORKING_DIRECTORY "@python_SOURCE_DIR@"
-  )
-
-endif()
diff --git a/CMake/cdat_modules_extra/python_make_step.cmake.in b/CMake/cdat_modules_extra/python_make_step.cmake.in
deleted file mode 100644
index 674463f893430b0aac68bd04d35f36d7072bc96c..0000000000000000000000000000000000000000
--- a/CMake/cdat_modules_extra/python_make_step.cmake.in
+++ /dev/null
@@ -1,34 +0,0 @@
-
-set(ENV{CC} "@CMAKE_C_COMPILER@")
-set(ENV{CXX} "@CMAKE_CXX_COMPILER@")
-set(ENV{CPP} "@CMAKE_CXX_COMPILER@")
-
-set(ENV{PATH} "@cdat_EXTERNALS@/bin:$ENV{PATH}")
-set(ENV{@LIBRARY_PATH@} "@cdat_EXTERNALS@/lib:$ENV{@LIBRARY_PATH@}")
-set(ENV{LDFLAGS} "-L@cdat_EXTERNALS@/lib @cdat_external_link_directories@  @cdat_rpath_flag@@CMAKE_INSTALL_PREFIX@/lib @cdat_rpath_flag@@cdat_EXTERNALS@/lib  @cdat_osx_ld_flags@ $ENV{LDFLAGS}")
-set(ENV{CFLAGS} "-I@cdat_EXTERNALS@/include -L@cdat_EXTERNALS@/lib @cdat_osx_flags@ @cdat_external_include_directories@  ${ADDITIONAL_CFLAGS} $ENV{CFLAGS}")
-set(ENV{CPPFLAGS} "-I@cdat_EXTERNALS@/include -L@cdat_EXTERNALS@/lib @cdat_osx_cppflags@ @cdat_external_include_directories@ ${ADDITIONAL_CPPFLAGS} $ENV{CPPFLAGS}")
-set(ENV{CXXFLAGS} "-I@cdat_EXTERNALS@/include -L@cdat_EXTERNALS@/lib @cdat_osx_cxxflags@ @cdat_external_include_directories@ ${ADDITIONAL_CXXFLAGS} $ENV{CXXFLAGS}")
-set(ENV{PKG_CONFIG_PATH} "@cdat_EXTERNALS@/lib/pkgconfig")
-set(ENV{PKG_CONFIG} "@cdat_PKG_CONFIG_EXECUTABLE@")
-set(ENV{FC} "")
-set(ENV{FCFLAGS} "")
-set(ENV{FCLIBS} "")
-set(ENV{F77} "")
-set(ENV{FFLAGS} "")
-set(ENV{FLIBS} "")
-set(ENV{LD_X11} "") # for xgks
-set(ENV{OPT} "")
-
-set(ENV{EXTERNALS} "@cdat_EXTERNALS@")
-
-if(APPLE)
-  set(ENV{AQUA_CDAT} "no")
-  set(ENV{MACOSX_DEPLOYMENT_TARGET} "@CMAKE_OSX_DEPLOYMENT_TARGET@")
-  unset(ENV{MAKEFLAGS})
-endif()
-
-EXECUTE_PROCESS( 
-  COMMAND make
-  WORKING_DIRECTORY "@python_SOURCE_DIR@"
-  )
diff --git a/CMake/cdat_modules_extra/python_patch_step.cmake.in b/CMake/cdat_modules_extra/python_patch_step.cmake.in
deleted file mode 100644
index ff2843efb5e19c4ba5e3ccc23151a202bc9bfd15..0000000000000000000000000000000000000000
--- a/CMake/cdat_modules_extra/python_patch_step.cmake.in
+++ /dev/null
@@ -1,21 +0,0 @@
-execute_process(
-  COMMAND
-  "@CMAKE_COMMAND@" -E copy_if_different "@cdat_SOURCE_DIR@/pysrc/src/setup-@PYTHON_VERSION@.py" "@python_SOURCE_DIR@/setup.py"
-  RESULT_VARIABLE errcode
-)
-if("${errcode}" STREQUAL "0")
-    message(STATUS "setup.py replaced")
-else()
-    message(FATAL_ERROR "Replacing setup.py failed: ${errcode}")
-endif()
-
-execute_process(
-  COMMAND
-  "@CMAKE_COMMAND@" -E copy_if_different "@cdat_SOURCE_DIR@/pysrc/src/site-@PYTHON_VERSION@.py" "@python_SOURCE_DIR@/Lib/site.py"
-  RESULT_VARIABLE errcode
-)
-if("${errcode}" STREQUAL "0")
-    message(STATUS "site.py replaced")
-else()
-    message(FATAL_ERROR "Replacing site.py failed: ${errcode}")
-endif()
diff --git a/CMake/cdat_modules_extra/python_setup.py.in b/CMake/cdat_modules_extra/python_setup.py.in
deleted file mode 100644
index 106853088ed28b371e91dc187bbf6c453b3d348b..0000000000000000000000000000000000000000
--- a/CMake/cdat_modules_extra/python_setup.py.in
+++ /dev/null
@@ -1,1918 +0,0 @@
-# Autodetecting setup.py script for building the Python extensions
-#
-
-__version__ = "$Revision: 78785 $"
-
-import sys, os, imp, re, optparse
-from glob import glob
-from platform import machine as platform_machine
-
-from distutils import log
-from distutils import sysconfig
-from distutils import text_file
-from distutils.errors import *
-from distutils.core import Extension, setup
-from distutils.command.build_ext import build_ext
-from distutils.command.install import install
-from distutils.command.install_lib import install_lib
-
-# This global variable is used to hold the list of modules to be disabled.
-disabled_module_list = []
-
-def add_dir_to_list(dirlist, dir):
-    """Add the directory 'dir' to the list 'dirlist' (at the front) if
-    1) 'dir' is not already in 'dirlist'
-    2) 'dir' actually exists, and is a directory."""
-    if dir is not None and os.path.isdir(dir) and dir not in dirlist:
-        dirlist.insert(0, dir)
-
-def find_file(filename, std_dirs, paths):
-    """Searches for the directory where a given file is located,
-    and returns a possibly-empty list of additional directories, or None
-    if the file couldn't be found at all.
-
-    'filename' is the name of a file, such as readline.h or libcrypto.a.
-    'std_dirs' is the list of standard system directories; if the
-        file is found in one of them, no additional directives are needed.
-    'paths' is a list of additional locations to check; if the file is
-        found in one of them, the resulting list will contain the directory.
-    """
-
-    # Check the standard locations
-    for dir in std_dirs:
-        f = os.path.join(dir, filename)
-        if os.path.exists(f): return []
-
-    # Check the additional directories
-    for dir in paths:
-        f = os.path.join(dir, filename)
-        if os.path.exists(f):
-            return [dir]
-
-    # Not found anywhere
-    return None
-
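find_file() has three distinct outcomes, and callers rely on the difference: an empty list means no extra -I flag is needed, a one-element list names the extra directory to add, and None means the header is absent. Hypothetical lookups showing all three:

    # find_file("readline.h", ["/usr/include"], ["/opt/local/include"])
    #   -> []                      found in a standard directory
    #   -> ["/opt/local/include"]  found only in the extra search path
    #   -> None                    not found anywhere
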
-def find_library_file(compiler, libname, std_dirs, paths):
-    result = compiler.find_library_file(std_dirs + paths, libname)
-    if result is None:
-        return None
-
-    # Check whether the found file is in one of the standard directories
-    dirname = os.path.dirname(result)
-    for p in std_dirs:
-        # Ensure path doesn't end with path separator
-        p = p.rstrip(os.sep)
-        if p == dirname:
-            return [ ]
-
-    # Otherwise, it must have been in one of the additional directories,
-    # so we have to figure out which one.
-    for p in paths:
-        # Ensure path doesn't end with path separator
-        p = p.rstrip(os.sep)
-        if p == dirname:
-            return [p]
-    else:
-        assert False, "Internal error: Path not found in std_dirs or paths"
-
-def module_enabled(extlist, modname):
-    """Returns whether the module 'modname' is present in the list
-    of extensions 'extlist'."""
-    extlist = [ext for ext in extlist if ext.name == modname]
-    return len(extlist)
-
-def find_module_file(module, dirlist):
-    """Find a module in a set of possible folders. If it is not found
-    return the unadorned filename"""
-    list = find_file(module, [], dirlist)
-    if not list:
-        return module
-    if len(list) > 1:
-        log.info("WARNING: multiple copies of %s found"%module)
-    return os.path.join(list[0], module)
-
-class PyBuildExt(build_ext):
-
-    def __init__(self, dist):
-        build_ext.__init__(self, dist)
-        self.failed = []
-
-    def build_extensions(self):
-
-        # Detect which modules should be compiled
-        missing = self.detect_modules()
-
-        # Remove modules that are present on the disabled list
-        extensions = [ext for ext in self.extensions
-                      if ext.name not in disabled_module_list]
-        # move ctypes to the end, it depends on other modules
-        ext_map = dict((ext.name, i) for i, ext in enumerate(extensions))
-        if "_ctypes" in ext_map:
-            ctypes = extensions.pop(ext_map["_ctypes"])
-            extensions.append(ctypes)
-        self.extensions = extensions
-
-        # Fix up the autodetected modules, prefixing all the source files
-        # with Modules/ and adding Python's include directory to the path.
-        (srcdir,) = sysconfig.get_config_vars('srcdir')
-        if not srcdir:
-            # Maybe running on Windows but not using CYGWIN?
-            raise ValueError("No source directory; cannot proceed.")
-
-        # Figure out the location of the source code for extension modules
-        # (This logic is copied in distutils.test.test_sysconfig,
-        # so building in a separate directory does not break test_distutils.)
-        moddir = os.path.join(os.getcwd(), srcdir, 'Modules')
-        moddir = os.path.normpath(moddir)
-        srcdir, tail = os.path.split(moddir)
-        srcdir = os.path.normpath(srcdir)
-        moddir = os.path.normpath(moddir)
-
-        moddirlist = [moddir]
-        incdirlist = ['./Include']
-
-        # Platform-dependent module source and include directories
-        platform = self.get_platform()
-        if platform in ('darwin', 'mac') and ("--disable-toolbox-glue" not in
-            sysconfig.get_config_var("CONFIG_ARGS")):
-            # Mac OS X also includes some mac-specific modules
-            macmoddir = os.path.join(os.getcwd(), srcdir, 'Mac/Modules')
-            moddirlist.append(macmoddir)
-            incdirlist.append('./Mac/Include')
-
-        alldirlist = moddirlist + incdirlist
-
-        # Fix up the paths for scripts, too
-        self.distribution.scripts = [os.path.join(srcdir, filename)
-                                     for filename in self.distribution.scripts]
-
-        # Python header files
-        headers = glob("Include/*.h") + ["pyconfig.h"]
-
-        for ext in self.extensions[:]:
-            ext.sources = [ find_module_file(filename, moddirlist)
-                            for filename in ext.sources ]
-            if ext.depends is not None:
-                ext.depends = [find_module_file(filename, alldirlist)
-                               for filename in ext.depends]
-            else:
-                ext.depends = []
-            # re-compile extensions if a header file has been changed
-            ext.depends.extend(headers)
-
-            ext.include_dirs.append( '.' ) # to get config.h
-            for incdir in incdirlist:
-                ext.include_dirs.append( os.path.join(srcdir, incdir) )
-
-            # If a module has already been built statically,
-            # don't build it here
-            if ext.name in sys.builtin_module_names:
-                self.extensions.remove(ext)
-
-        if platform != 'mac':
-            # Parse Modules/Setup and Modules/Setup.local to figure out which
-            # modules are turned on in the file.
-            remove_modules = []
-            for filename in ('Modules/Setup', 'Modules/Setup.local'):
-                input = text_file.TextFile(filename, join_lines=1)
-                while 1:
-                    line = input.readline()
-                    if not line: break
-                    line = line.split()
-                    remove_modules.append(line[0])
-                input.close()
-
-            for ext in self.extensions[:]:
-                if ext.name in remove_modules:
-                    self.extensions.remove(ext)
-
-        # When you run "make CC=altcc" or something similar, you really want
-        # those environment variables passed into the setup.py phase.  Here's
-        # a small set of useful ones.
-        compiler = os.environ.get('CC')
-        args = {}
-        # unfortunately, distutils doesn't let us provide separate C and C++
-        # compilers
-        if compiler is not None:
-            (ccshared,cflags) = sysconfig.get_config_vars('CCSHARED','CFLAGS')
-            args['compiler_so'] = compiler + ' ' + ccshared + ' ' + cflags
-        self.compiler.set_executables(**args)
-
-        build_ext.build_extensions(self)
-
-        longest = max([len(e.name) for e in self.extensions])
-        if self.failed:
-            longest = max(longest, max([len(name) for name in self.failed]))
-
-        def print_three_column(lst):
-            lst.sort(key=str.lower)
-            # guarantee zip() doesn't drop anything
-            while len(lst) % 3:
-                lst.append("")
-            for e, f, g in zip(lst[::3], lst[1::3], lst[2::3]):
-                print "%-*s   %-*s   %-*s" % (longest, e, longest, f,
-                                              longest, g)
-
-        if missing:
-            print
-            print "Failed to find the necessary bits to build these modules:"
-            print_three_column(missing)
-            print ("To find the necessary bits, look in setup.py in"
-                   " detect_modules() for the module's name.")
-            print
-
-        if self.failed:
-            failed = self.failed[:]
-            print
-            print "Failed to build these modules:"
-            print_three_column(failed)
-            print
-
-    def build_extension(self, ext):
-
-        if ext.name == '_ctypes':
-            if not self.configure_ctypes(ext):
-                return
-
-        try:
-            build_ext.build_extension(self, ext)
-        except (CCompilerError, DistutilsError), why:
-            self.announce('WARNING: building of extension "%s" failed: %s' %
-                          (ext.name, sys.exc_info()[1]))
-            self.failed.append(ext.name)
-            return
-        # Workaround for Mac OS X: The Carbon-based modules cannot be
-        # reliably imported into a command-line Python
-        if 'Carbon' in ext.extra_link_args:
-            self.announce(
-                'WARNING: skipping import check for Carbon-based "%s"' %
-                ext.name)
-            return
-
-        if self.get_platform() == 'darwin' and (
-                sys.maxint > 2**32 and '-arch' in ext.extra_link_args):
-            # Don't bother doing an import check when an extension was
-            # built with an explicit '-arch' flag on OSX. That's currently
-            # only used to build 32-bit only extensions in a 4-way
-            # universal build and loading 32-bit code into a 64-bit
-            # process will fail.
-            self.announce(
-                'WARNING: skipping import check for "%s"' %
-                ext.name)
-            return
-
-        # Workaround for Cygwin: Cygwin currently has fork issues when many
-        # modules have been imported
-        if self.get_platform() == 'cygwin':
-            self.announce('WARNING: skipping import check for Cygwin-based "%s"'
-                % ext.name)
-            return
-        ext_filename = os.path.join(
-            self.build_lib,
-            self.get_ext_filename(self.get_ext_fullname(ext.name)))
-        try:
-            imp.load_dynamic(ext.name, ext_filename)
-        except ImportError, why:
-            self.failed.append(ext.name)
-            self.announce('*** WARNING: renaming "%s" since importing it'
-                          ' failed: %s' % (ext.name, why), level=3)
-            assert not self.inplace
-            basename, tail = os.path.splitext(ext_filename)
-            newname = basename + "_failed" + tail
-            if os.path.exists(newname):
-                os.remove(newname)
-            os.rename(ext_filename, newname)
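-            # e.g. a 'spam.so' that cannot be imported ends up renamed to
-            # 'spam_failed.so' (hypothetical module name):
-            #   >>> os.path.splitext('spam.so')
-            #   ('spam', '.so')
-            # so newname is 'spam' + '_failed' + '.so'.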
-
-            # XXX -- This relies on a Vile HACK in
-            # distutils.command.build_ext.build_extension().  The
-            # _built_objects attribute is stored there strictly for
-            # use here.
-            # If there is a failure, _built_objects may not be there,
-            # so catch the AttributeError and move on.
-            try:
-                for filename in self._built_objects:
-                    os.remove(filename)
-            except AttributeError:
-                self.announce('unable to remove files (ignored)')
-        except:
-            exc_type, why, tb = sys.exc_info()
-            self.announce('*** WARNING: importing extension "%s" '
-                          'failed with %s: %s' % (ext.name, exc_type, why),
-                          level=3)
-            self.failed.append(ext.name)
-
-    def get_platform(self):
-        # Get value of sys.platform
-        for platform in ['cygwin', 'beos', 'darwin', 'atheos', 'osf1']:
-            if sys.platform.startswith(platform):
-                return platform
-        return sys.platform
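-        # Only the prefix has to match; e.g. on OSF/1, where sys.platform
-        # is a versioned string like 'osf1V5' (illustrative value), this
-        # normalizes to plain 'osf1', while an unlisted value such as
-        # 'linux2' is returned unchanged.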
-
-    def detect_modules(self):
-        # Ensure that /usr/local is always used
-        add_dir_to_list(self.compiler.library_dirs, '/usr/local/lib')
-        add_dir_to_list(self.compiler.include_dirs, '/usr/local/include')
-
-        # Add paths specified in the environment variables LDFLAGS and
-        # CPPFLAGS for header and library files.
-        # We must get the values from the Makefile and not the environment
-        # directly since an inconsistently reproducible issue comes up where
-        # the environment variable is not set even though the value was passed
-        # into configure and stored in the Makefile (issue found on OS X 10.3).
-        for env_var, arg_name, dir_list in (
-                ('LDFLAGS', '-R', self.compiler.runtime_library_dirs),
-                ('LDFLAGS', '-L', self.compiler.library_dirs),
-                ('CPPFLAGS', '-I', self.compiler.include_dirs)):
-            env_val = sysconfig.get_config_var(env_var)
-            if env_val:
-                # To prevent optparse from raising an exception about any
-                # options in env_val that it doesn't know about we strip out
-                # all double dashes and any dashes followed by a character
-                # that is not for the option we are dealing with.
-                #
-                # Please note that the order of the regex alternatives is
-                # important!  We must strip out double-dashes first so that
-                # we don't end up substituting "--Long" with "-Long" and
-                # thus have "ong" used as a library directory.
-                env_val = re.sub(r'(^|\s+)-(-|(?!%s))' % arg_name[1],
-                                 ' ', env_val)
-                parser = optparse.OptionParser()
-                # Make sure that args interspersed with options are allowed
-                parser.allow_interspersed_args = True
-                parser.error = lambda msg: None
-                parser.add_option(arg_name, dest="dirs", action="append")
-                options = parser.parse_args(env_val.split())[0]
-                if options.dirs:
-                    for directory in reversed(options.dirs):
-                        add_dir_to_list(dir_list, directory)
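-        # A doctest-style sketch of the stripping above for arg_name
-        # '-I' (hypothetical flag values):
-        #   >>> re.sub(r'(^|\s+)-(-|(?!I))', ' ',
-        #   ...        '-L/opt/lib --with-x -I/opt/include')
-        #   ' L/opt/lib with-x -I/opt/include'
-        # optparse then recognizes only the -I option; the de-fanged
-        # leftovers are ignored as interspersed positional arguments.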
-
-        if os.path.normpath(sys.prefix) != '/usr':
-            add_dir_to_list(self.compiler.library_dirs,
-                            sysconfig.get_config_var("LIBDIR"))
-            add_dir_to_list(self.compiler.include_dirs,
-                            sysconfig.get_config_var("INCLUDEDIR"))
-
-        try:
-            have_unicode = unicode
-        except NameError:
-            have_unicode = 0
-
-        # lib_dirs and inc_dirs are used to search for files;
-        # if a file is found in one of those directories, it can
-        # be assumed that no additional -I,-L directives are needed.
-        lib_dirs = self.compiler.library_dirs + [
-            '/lib64', '/usr/lib64',
-            '/lib', '/usr/lib',
-            ]
-        inc_dirs = self.compiler.include_dirs + ['/usr/include']
-        exts = []
-        missing = []
-
-        config_h = sysconfig.get_config_h_filename()
-        config_h_vars = sysconfig.parse_config_h(open(config_h))
-
-        platform = self.get_platform()
-        (srcdir,) = sysconfig.get_config_vars('srcdir')
-
-        # Check for AtheOS which has libraries in non-standard locations
-        if platform == 'atheos':
-            lib_dirs += ['/system/libs', '/atheos/autolnk/lib']
-            lib_dirs += os.getenv('LIBRARY_PATH', '').split(os.pathsep)
-            inc_dirs += ['/system/include', '/atheos/autolnk/include']
-            inc_dirs += os.getenv('C_INCLUDE_PATH', '').split(os.pathsep)
-
-        # OSF/1 and Unixware have some stuff in /usr/ccs/lib (like -ldb)
-        if platform in ['osf1', 'unixware7', 'openunix8']:
-            lib_dirs += ['/usr/ccs/lib']
-
-        if platform == 'darwin':
-            # This should work on any unixy platform ;-)
-            # If the user has bothered specifying additional -I and -L flags
-            # in OPT and LDFLAGS we might as well use them here.
-            #   NOTE: using shlex.split would technically be more correct, but
-            # also gives a bootstrap problem. Let's hope nobody uses directories
-            # with whitespace in the name to store libraries.
-            cflags, ldflags = sysconfig.get_config_vars(
-                    'CFLAGS', 'LDFLAGS')
-            for item in cflags.split():
-                if item.startswith('-I'):
-                    inc_dirs.append(item[2:])
-
-            for item in ldflags.split():
-                if item.startswith('-L'):
-                    lib_dirs.append(item[2:])
-
-        # Check for MacOS X, which doesn't need libm.a at all
-        math_libs = ['m']
-        if platform in ['darwin', 'beos', 'mac']:
-            math_libs = []
-
-        # XXX Omitted modules: gl, pure, dl, SGI-specific modules
-
-        #
-        # The following modules are all pretty straightforward, and compile
-        # on pretty much any POSIXish platform.
-        #
-
-        # Some modules that are normally always on:
-        exts.append( Extension('_weakref', ['_weakref.c']) )
-
-        # array objects
-        exts.append( Extension('array', ['arraymodule.c']) )
-        # complex math library functions
-        exts.append( Extension('cmath', ['cmathmodule.c'],
-                               libraries=math_libs) )
-
-        # math library functions, e.g. sin()
-        exts.append( Extension('math',  ['mathmodule.c'],
-                               libraries=math_libs) )
-        # fast string operations implemented in C
-        exts.append( Extension('strop', ['stropmodule.c']) )
-        # time operations and variables
-        exts.append( Extension('time', ['timemodule.c'],
-                               libraries=math_libs) )
-        exts.append( Extension('datetime', ['datetimemodule.c', 'timemodule.c'],
-                               libraries=math_libs) )
-        # fast iterator tools implemented in C
-        exts.append( Extension("itertools", ["itertoolsmodule.c"]) )
-        # code that will be builtins in the future, but conflicts with
-        # the current builtins
-        exts.append( Extension('future_builtins', ['future_builtins.c']) )
-        # random number generator implemented in C
-        exts.append( Extension("_random", ["_randommodule.c"]) )
-        # high-performance collections
-        exts.append( Extension("_collections", ["_collectionsmodule.c"]) )
-        # bisect
-        exts.append( Extension("_bisect", ["_bisectmodule.c"]) )
-        # heapq
-        exts.append( Extension("_heapq", ["_heapqmodule.c"]) )
-        # operator.add() and similar goodies
-        exts.append( Extension('operator', ['operator.c']) )
-        # Python 3.0 _fileio module
-        exts.append( Extension("_fileio", ["_fileio.c"]) )
-        # Python 3.0 _bytesio module
-        exts.append( Extension("_bytesio", ["_bytesio.c"]) )
-        # _functools
-        exts.append( Extension("_functools", ["_functoolsmodule.c"]) )
-        # _json speedups
-        exts.append( Extension("_json", ["_json.c"]) )
-        # Python C API test module
-        exts.append( Extension('_testcapi', ['_testcapimodule.c'],
-                               depends=['testcapi_long.h']) )
-        # profilers (_lsprof is for cProfile.py)
-        exts.append( Extension('_hotshot', ['_hotshot.c']) )
-        exts.append( Extension('_lsprof', ['_lsprof.c', 'rotatingtree.c']) )
-        # static Unicode character database
-        if have_unicode:
-            exts.append( Extension('unicodedata', ['unicodedata.c']) )
-        else:
-            missing.append('unicodedata')
-        # access to ISO C locale support
-        data = open('pyconfig.h').read()
-        m = re.search(r"#s*define\s+WITH_LIBINTL\s+1\s*", data)
-        if m is not None:
-            locale_libs = ['intl']
-        else:
-            locale_libs = []
-        if platform == 'darwin':
-            locale_extra_link_args = ['-framework', 'CoreFoundation']
-        else:
-            locale_extra_link_args = []
-
-
-        exts.append( Extension('_locale', ['_localemodule.c'],
-                               libraries=locale_libs,
-                               extra_link_args=locale_extra_link_args) )
-
-        # Modules with some UNIX dependencies -- on by default:
-        # (If you have a really backward UNIX, select and socket may not be
-        # supported...)
-
-        # fcntl(2) and ioctl(2)
-        exts.append( Extension('fcntl', ['fcntlmodule.c']) )
-        if platform not in ['mac']:
-            # pwd(3)
-            exts.append( Extension('pwd', ['pwdmodule.c']) )
-            # grp(3)
-            exts.append( Extension('grp', ['grpmodule.c']) )
-            # spwd, shadow passwords
-            if (config_h_vars.get('HAVE_GETSPNAM', False) or
-                    config_h_vars.get('HAVE_GETSPENT', False)):
-                exts.append( Extension('spwd', ['spwdmodule.c']) )
-            else:
-                missing.append('spwd')
-        else:
-            missing.extend(['pwd', 'grp', 'spwd'])
-
-        # select(2); not on ancient System V
-        exts.append( Extension('select', ['selectmodule.c']) )
-
-        # Fred Drake's interface to the Python parser
-        exts.append( Extension('parser', ['parsermodule.c']) )
-
-        # cStringIO and cPickle
-        exts.append( Extension('cStringIO', ['cStringIO.c']) )
-        exts.append( Extension('cPickle', ['cPickle.c']) )
-
-        # Memory-mapped files (also works on Win32).
-        if platform not in ['atheos', 'mac']:
-            exts.append( Extension('mmap', ['mmapmodule.c']) )
-        else:
-            missing.append('mmap')
-
-        # Lance Ellinghaus's syslog module
-        if platform not in ['mac']:
-            # syslog daemon interface
-            exts.append( Extension('syslog', ['syslogmodule.c']) )
-        else:
-            missing.append('syslog')
-
-        # George Neville-Neil's timing module:
-        # Deprecated in PEP 4 http://www.python.org/peps/pep-0004.html
-        # http://mail.python.org/pipermail/python-dev/2006-January/060023.html
-        #exts.append( Extension('timing', ['timingmodule.c']) )
-
-        #
-        # Here ends the simple stuff.  From here on, modules need certain
-        # libraries, are platform-specific, or present other surprises.
-        #
-
-        # Multimedia modules
-        # These don't work for 64-bit platforms!!!
-        # These represent audio samples or images as strings:
-
-        # Operations on audio samples
-        # According to #993173, this one should actually work fine on
-        # 64-bit platforms.
-        exts.append( Extension('audioop', ['audioop.c']) )
-
-        # Disabled on 64-bit platforms
-        if sys.maxint != 9223372036854775807L:
-            # Operations on images
-            exts.append( Extension('imageop', ['imageop.c']) )
-        else:
-            missing.extend(['imageop'])
-
-        # readline
-        do_readline = self.compiler.find_library_file(lib_dirs, 'readline')
-        if platform == 'darwin':
-            os_release = int(os.uname()[2].split('.')[0])
-            dep_target = sysconfig.get_config_var('MACOSX_DEPLOYMENT_TARGET')
-            # compare numerically; a string compare misorders '10' vs '5'
-            if dep_target and map(int, dep_target.split('.')) < [10, 5]:
-                os_release = 8
-            if os_release < 9:
-                # MacOSX 10.4 has a broken readline. Don't try to build
-                # the readline module unless the user has installed a fixed
-                # readline package
-                if find_file('readline/rlconf.h', inc_dirs, []) is None:
-                    do_readline = False
-        if do_readline:
-            if platform == 'darwin' and os_release < 9:
-                # In every directory on the search path search for a dynamic
-                # library and then a static library, instead of first looking
-                # for dynamic libraries on the entire path.
-                # This way a statically linked custom readline gets picked up
-                # before the (broken) dynamic library in /usr/lib.
-                readline_extra_link_args = ('-Wl,-search_paths_first',)
-            else:
-                readline_extra_link_args = ()
-
-            readline_libs = ['readline']
-            if self.compiler.find_library_file(lib_dirs,
-                                                 'ncursesw'):
-                readline_libs.append('ncursesw')
-            elif self.compiler.find_library_file(lib_dirs,
-                                                 'ncurses'):
-                readline_libs.append('ncurses')
-            elif self.compiler.find_library_file(lib_dirs, 'curses'):
-                readline_libs.append('curses')
-            elif self.compiler.find_library_file(lib_dirs +
-                                               ['/usr/lib/termcap'],
-                                               'termcap'):
-                readline_libs.append('termcap')
-            exts.append( Extension('readline', ['readline.c'],
-                                   library_dirs=['/usr/lib/termcap'],
-                                   extra_link_args=readline_extra_link_args,
-                                   libraries=readline_libs) )
-        else:
-            missing.append('readline')
-
-        if platform not in ['mac']:
-            # crypt module.
-
-            if self.compiler.find_library_file(lib_dirs, 'crypt'):
-                libs = ['crypt']
-            else:
-                libs = []
-            exts.append( Extension('crypt', ['cryptmodule.c'], libraries=libs) )
-        else:
-            missing.append('crypt')
-
-        # CSV files
-        exts.append( Extension('_csv', ['_csv.c']) )
-
-        # socket(2)
-        exts.append( Extension('_socket', ['socketmodule.c'],
-                               depends = ['socketmodule.h']) )
-        # Detect SSL support for the socket module (via _ssl)
-        search_for_ssl_incs_in = [
-                              '/usr/local/ssl/include',
-                              '/usr/contrib/ssl/include/'
-                             ]
-        ssl_incs = find_file('openssl/ssl.h', inc_dirs,
-                             search_for_ssl_incs_in
-                             )
-        if ssl_incs is not None:
-            krb5_h = find_file('krb5.h', inc_dirs,
-                               ['/usr/kerberos/include'])
-            if krb5_h:
-                ssl_incs += krb5_h
-        ssl_libs = find_library_file(self.compiler, 'ssl',lib_dirs,
-                                     ['/usr/local/ssl/lib',
-                                      '/usr/contrib/ssl/lib/'
-                                     ] )
-
-        if (ssl_incs is not None and
-            ssl_libs is not None):
-            exts.append( Extension('_ssl', ['_ssl.c'],
-                                   include_dirs = ssl_incs,
-                                   library_dirs = ssl_libs,
-                                   libraries = ['ssl', 'crypto'],
-                                   depends = ['socketmodule.h']) )
-        else:
-            missing.append('_ssl')
-
-        # find out which version of OpenSSL we have
-        openssl_ver = 0
-        openssl_ver_re = re.compile(
-            r'^\s*#\s*define\s+OPENSSL_VERSION_NUMBER\s+(0x[0-9a-fA-F]+)' )
-        for ssl_inc_dir in inc_dirs + search_for_ssl_incs_in:
-            name = os.path.join(ssl_inc_dir, 'openssl', 'opensslv.h')
-            if os.path.isfile(name):
-                try:
-                    incfile = open(name, 'r')
-                    for line in incfile:
-                        m = openssl_ver_re.match(line)
-                        if m:
-                            openssl_ver = int(m.group(1), 16)
-                            break
-                except IOError:
-                    pass
-
-            # first version found is what we'll use (as the compiler should)
-            if openssl_ver:
-                break
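-        # OPENSSL_VERSION_NUMBER packs the version as 0xMNNFFPPS (major,
-        # minor, fix, patch, status nibbles), so the 0x00907000 threshold
-        # used below simply means "at least OpenSSL 0.9.7".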
-
-        #print 'openssl_ver = 0x%08x' % openssl_ver
-
-        if (ssl_incs is not None and
-            ssl_libs is not None and
-            openssl_ver >= 0x00907000):
-            # The _hashlib module wraps optimized implementations
-            # of hash functions from the OpenSSL library.
-            exts.append( Extension('_hashlib', ['_hashopenssl.c'],
-                                   include_dirs = ssl_incs,
-                                   library_dirs = ssl_libs,
-                                   libraries = ['ssl', 'crypto']) )
-            # these aren't strictly missing since they are unneeded.
-            #missing.extend(['_sha', '_md5'])
-        else:
-            # The _sha module implements the SHA1 hash algorithm.
-            exts.append( Extension('_sha', ['shamodule.c']) )
-            # The _md5 module implements the RSA Data Security, Inc. MD5
-            # Message-Digest Algorithm, described in RFC 1321.  The
-            # necessary files md5.c and md5.h are included here.
-            exts.append( Extension('_md5',
-                            sources = ['md5module.c', 'md5.c'],
-                            depends = ['md5.h']) )
-            missing.append('_hashlib')
-
-        if (openssl_ver < 0x00908000):
-            # OpenSSL doesn't do these until 0.9.8 so we'll bring our own hash
-            exts.append( Extension('_sha256', ['sha256module.c']) )
-            exts.append( Extension('_sha512', ['sha512module.c']) )
-
-        # Modules that provide persistent dictionary-like semantics.  You will
-        # probably want to arrange for at least one of them to be available on
-        # your machine, though none are defined by default because of library
-        # dependencies.  The Python module anydbm.py provides an
-        # implementation independent wrapper for these; dumbdbm.py provides
-        # similar functionality (but slower of course) implemented in Python.
-
-        # Sleepycat^WOracle Berkeley DB interface.
-        #  http://www.oracle.com/database/berkeley-db/db/index.html
-        #
-        # This requires the Sleepycat^WOracle DB code. The supported versions
-        # are set below.  Visit the URL above to download
-        # a release.  Most open source OSes come with one or more
-        # versions of BerkeleyDB already installed.
-
-        max_db_ver = (4, 7)
-        min_db_ver = (3, 3)
-        db_setup_debug = False   # verbose debug prints from this script?
-
-        def allow_db_ver(db_ver):
-            """Returns a boolean if the given BerkeleyDB version is acceptable.
-
-            Args:
-              db_ver: A tuple of the version to verify.
-            """
-            if not (min_db_ver <= db_ver <= max_db_ver):
-                return False
-            # Use this function to filter out known bad configurations.
-            if (4, 6) == db_ver[:2]:
-                # BerkeleyDB 4.6.x is not stable on many architectures.
-                arch = platform_machine()
-                if arch not in ('i386', 'i486', 'i586', 'i686',
-                                'x86_64', 'ia64'):
-                    return False
-            return True
-
-        def gen_db_minor_ver_nums(major):
-            if major == 4:
-                for x in range(max_db_ver[1]+1):
-                    if allow_db_ver((4, x)):
-                        yield x
-            elif major == 3:
-                for x in (3,):
-                    if allow_db_ver((3, x)):
-                        yield x
-            else:
-                raise ValueError("unknown major BerkeleyDB version", major)
-
-        # construct a list of paths to look for the header file in on
-        # top of the normal inc_dirs.
-        db_inc_paths = [
-            '/usr/include/db4',
-            '/usr/local/include/db4',
-            '/opt/sfw/include/db4',
-            '/usr/include/db3',
-            '/usr/local/include/db3',
-            '/opt/sfw/include/db3',
-            # Fink defaults (http://fink.sourceforge.net/)
-            '/sw/include/db4',
-            '/sw/include/db3',
-        ]
-        # 4.x minor number specific paths
-        for x in gen_db_minor_ver_nums(4):
-            db_inc_paths.append('/usr/include/db4%d' % x)
-            db_inc_paths.append('/usr/include/db4.%d' % x)
-            db_inc_paths.append('/usr/local/BerkeleyDB.4.%d/include' % x)
-            db_inc_paths.append('/usr/local/include/db4%d' % x)
-            db_inc_paths.append('/pkg/db-4.%d/include' % x)
-            db_inc_paths.append('/opt/db-4.%d/include' % x)
-            # MacPorts default (http://www.macports.org/)
-            db_inc_paths.append('/opt/local/include/db4%d' % x)
-        # 3.x minor number specific paths
-        for x in gen_db_minor_ver_nums(3):
-            db_inc_paths.append('/usr/include/db3%d' % x)
-            db_inc_paths.append('/usr/local/BerkeleyDB.3.%d/include' % x)
-            db_inc_paths.append('/usr/local/include/db3%d' % x)
-            db_inc_paths.append('/pkg/db-3.%d/include' % x)
-            db_inc_paths.append('/opt/db-3.%d/include' % x)
-
-        # Add some common subdirectories for Sleepycat DB to the list,
-        # based on the standard include directories. This way DB3/4 gets
-        # picked up when it is installed in a non-standard prefix and
-        # the user has added that prefix into inc_dirs.
-        std_variants = []
-        for dn in inc_dirs:
-            std_variants.append(os.path.join(dn, 'db3'))
-            std_variants.append(os.path.join(dn, 'db4'))
-            for x in gen_db_minor_ver_nums(4):
-                std_variants.append(os.path.join(dn, "db4%d"%x))
-                std_variants.append(os.path.join(dn, "db4.%d"%x))
-            for x in gen_db_minor_ver_nums(3):
-                std_variants.append(os.path.join(dn, "db3%d"%x))
-                std_variants.append(os.path.join(dn, "db3.%d"%x))
-
-        db_inc_paths = std_variants + db_inc_paths
-        db_inc_paths = [p for p in db_inc_paths if os.path.exists(p)]
-
-        db_ver_inc_map = {}
-
-        class db_found(Exception): pass
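-        # The sentinel exception is just a labelled break: raising it in
-        # the nested search below exits both loops at once, leaving
-        # db_ver, dblib and dblib_dir bound to the first usable match.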
-        try:
-            # See whether there is a Sleepycat header in the standard
-            # search path.
-            for d in inc_dirs + db_inc_paths:
-                f = os.path.join(d, "db.h")
-                if db_setup_debug: print "db: looking for db.h in", f
-                if os.path.exists(f):
-                    f = open(f).read()
-                    m = re.search(r"#define\WDB_VERSION_MAJOR\W(\d+)", f)
-                    if m:
-                        db_major = int(m.group(1))
-                        m = re.search(r"#define\WDB_VERSION_MINOR\W(\d+)", f)
-                        db_minor = int(m.group(1))
-                        db_ver = (db_major, db_minor)
-
-                        # Avoid 4.6 prior to 4.6.21 due to a BerkeleyDB bug
-                        if db_ver == (4, 6):
-                            m = re.search(r"#define\WDB_VERSION_PATCH\W(\d+)", f)
-                            db_patch = int(m.group(1))
-                            if db_patch < 21:
-                                print "db.h:", db_ver, "patch", db_patch,
-                                print "being ignored (4.6.x must be >= 4.6.21)"
-                                continue
-
-                        if ( (db_ver not in db_ver_inc_map) and
-                            allow_db_ver(db_ver) ):
-                            # save the include directory with the db.h version
-                            # (first occurrence only)
-                            db_ver_inc_map[db_ver] = d
-                            if db_setup_debug:
-                                print "db.h: found", db_ver, "in", d
-                        else:
-                            # we already found a header for this library version
-                            if db_setup_debug: print "db.h: ignoring", d
-                    else:
-                        # ignore this header, it didn't contain a version number
-                        if db_setup_debug:
-                            print "db.h: no version number version in", d
-
-            db_found_vers = db_ver_inc_map.keys()
-            db_found_vers.sort()
-
-            while db_found_vers:
-                db_ver = db_found_vers.pop()
-                db_incdir = db_ver_inc_map[db_ver]
-
-                # check lib directories parallel to the location of the header
-                db_dirs_to_check = [
-                    db_incdir.replace("include", 'lib64'),
-                    db_incdir.replace("include", 'lib'),
-                ]
-                db_dirs_to_check = filter(os.path.isdir, db_dirs_to_check)
-
-                # Look for a version-specific db-X.Y before an ambiguous dbX
-                # XXX should we -ever- look for a dbX name?  Do any
-                # systems really not name their library by version and
-                # symlink to more general names?
-                for dblib in (('db-%d.%d' % db_ver),
-                              ('db%d%d' % db_ver),
-                              ('db%d' % db_ver[0])):
-                    dblib_file = self.compiler.find_library_file(
-                                    db_dirs_to_check + lib_dirs, dblib )
-                    if dblib_file:
-                        dblib_dir = [ os.path.abspath(os.path.dirname(dblib_file)) ]
-                        raise db_found
-                    else:
-                        if db_setup_debug: print "db lib: ", dblib, "not found"
-
-        except db_found:
-            if db_setup_debug:
-                print "bsddb using BerkeleyDB lib:", db_ver, dblib
-                print "bsddb lib dir:", dblib_dir, " inc dir:", db_incdir
-            db_incs = [db_incdir]
-            dblibs = [dblib]
-            # We add the runtime_library_dirs argument because the
-            # BerkeleyDB lib we're linking against often isn't in the
-            # system dynamic library search path.  This is usually
-            # correct and most trouble free, but may cause problems in
-            # some unusual system configurations (e.g. the directory
-            # is on an NFS server that goes away).
-            exts.append(Extension('_bsddb', ['_bsddb.c'],
-                                  depends = ['bsddb.h'],
-                                  library_dirs=dblib_dir,
-                                  runtime_library_dirs=dblib_dir,
-                                  include_dirs=db_incs,
-                                  libraries=dblibs))
-        else:
-            if db_setup_debug: print "db: no appropriate library found"
-            db_incs = None
-            dblibs = []
-            dblib_dir = None
-            missing.append('_bsddb')
-
-        # The sqlite interface
-        sqlite_setup_debug = False   # verbose debug prints from this script?
-
-        # We hunt for #define SQLITE_VERSION "n.n.n"
-        # We need to find >= sqlite version 3.0.8
-        sqlite_incdir = sqlite_libdir = None
-        sqlite_inc_paths = [ '/usr/include',
-                             '/usr/include/sqlite',
-                             '/usr/include/sqlite3',
-                             '/usr/local/include',
-                             '/usr/local/include/sqlite',
-                             '/usr/local/include/sqlite3',
-                           ]
-        MIN_SQLITE_VERSION_NUMBER = (3, 0, 8)
-        MIN_SQLITE_VERSION = ".".join([str(x)
-                                    for x in MIN_SQLITE_VERSION_NUMBER])
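-        # Version strings are turned into int tuples before comparing,
-        # since tuples compare element-wise:
-        #   >>> (3, 9, 0) >= (3, 0, 8)
-        #   True
-        # whereas a plain string compare would wrongly rank "3.9.0"
-        # above "3.10.0".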
-
-        # Scan the default include directories before the SQLite specific
-        # ones. This allows one to override the copy of sqlite on OSX,
-        # where /usr/include contains an old version of sqlite.
-        for d in inc_dirs + sqlite_inc_paths:
-            f = os.path.join(d, "sqlite3.h")
-            if os.path.exists(f):
-                if sqlite_setup_debug: print "sqlite: found %s"%f
-                incf = open(f).read()
-                m = re.search(
-                    r'\s*.*#\s*.*define\s.*SQLITE_VERSION\W*"(.*)"', incf)
-                if m:
-                    sqlite_version = m.group(1)
-                    sqlite_version_tuple = tuple([int(x)
-                                        for x in sqlite_version.split(".")])
-                    if sqlite_version_tuple >= MIN_SQLITE_VERSION_NUMBER:
-                        # we win!
-                        if sqlite_setup_debug:
-                            print "%s/sqlite3.h: version %s"%(d, sqlite_version)
-                        sqlite_incdir = d
-                        break
-                    else:
-                        if sqlite_setup_debug:
-                            print "%s: version %d is too old, need >= %s"%(d,
-                                        sqlite_version, MIN_SQLITE_VERSION)
-                elif sqlite_setup_debug:
-                    print "sqlite: %s had no SQLITE_VERSION"%(f,)
-
-        if sqlite_incdir:
-            sqlite_dirs_to_check = [
-                os.path.join(sqlite_incdir, '..', 'lib64'),
-                os.path.join(sqlite_incdir, '..', 'lib'),
-                os.path.join(sqlite_incdir, '..', '..', 'lib64'),
-                os.path.join(sqlite_incdir, '..', '..', 'lib'),
-            ]
-            sqlite_libfile = self.compiler.find_library_file(
-                                sqlite_dirs_to_check + lib_dirs, 'sqlite3')
-            if sqlite_libfile:
-                sqlite_libdir = [os.path.abspath(os.path.dirname(sqlite_libfile))]
-
-        if sqlite_incdir and sqlite_libdir:
-            sqlite_srcs = ['_sqlite/cache.c',
-                '_sqlite/connection.c',
-                '_sqlite/cursor.c',
-                '_sqlite/microprotocols.c',
-                '_sqlite/module.c',
-                '_sqlite/prepare_protocol.c',
-                '_sqlite/row.c',
-                '_sqlite/statement.c',
-                '_sqlite/util.c', ]
-
-            sqlite_defines = []
-            if sys.platform != "win32":
-                sqlite_defines.append(('MODULE_NAME', '"sqlite3"'))
-            else:
-                sqlite_defines.append(('MODULE_NAME', '\\"sqlite3\\"'))
-
-
-            if sys.platform == 'darwin':
-                # In every directory on the search path search for a dynamic
-                # library and then a static library, instead of first looking
-                # for dynamic libraries on the entire path.
-                # This way a statically linked custom sqlite gets picked up
-                # before the dynamic library in /usr/lib.
-                sqlite_extra_link_args = ('-Wl,-search_paths_first',)
-            else:
-                sqlite_extra_link_args = ()
-
-            exts.append(Extension('_sqlite3', sqlite_srcs,
-                                  define_macros=sqlite_defines,
-                                  include_dirs=["Modules/_sqlite",
-                                                sqlite_incdir],
-                                  library_dirs=sqlite_libdir,
-                                  runtime_library_dirs=sqlite_libdir,
-                                  extra_link_args=sqlite_extra_link_args,
-                                  libraries=["sqlite3",]))
-        else:
-            missing.append('_sqlite3')
-
-        # Look for Berkeley db 1.85.   Note that it is built as a different
-        # module name so it can be included even when later versions are
-        # available.  A very restrictive search is performed to avoid
-        # accidentally building this module with a later version of the
-        # underlying db library.  Many BSD-ish Unixes incorporate db 1.85
-        # symbols into libc and place the include file in /usr/include.
-        #
-        # If the better bsddb library can be built (db_incs is defined)
-        # we do not build this one.  Otherwise this build will pick up
-        # the more recent berkeleydb's db.h file first in the include path
-        # when attempting to compile and it will fail.
-        f = "/usr/include/db.h"
-        if os.path.exists(f) and not db_incs:
-            data = open(f).read()
-            m = re.search(r"#s*define\s+HASHVERSION\s+2\s*", data)
-            if m is not None:
-                # bingo - old version used hash file format version 2
-                ### XXX this should be fixed to not be platform-dependent
-                ### but I don't have direct access to an osf1 platform, and
-                ### it seemed to be muffing the search somehow
-                libraries = platform == "osf1" and ['db'] or None
-                if libraries is not None:
-                    exts.append(Extension('bsddb185', ['bsddbmodule.c'],
-                                          libraries=libraries))
-                else:
-                    exts.append(Extension('bsddb185', ['bsddbmodule.c']))
-            else:
-                missing.append('bsddb185')
-        else:
-            missing.append('bsddb185')
-
-        # The standard Unix dbm module:
-        if platform not in ['cygwin']:
-            if find_file("ndbm.h", inc_dirs, []) is not None:
-                # Some systems have -lndbm, others don't
-                if self.compiler.find_library_file(lib_dirs, 'ndbm'):
-                    ndbm_libs = ['ndbm']
-                else:
-                    ndbm_libs = []
-                exts.append( Extension('dbm', ['dbmmodule.c'],
-                                       define_macros=[('HAVE_NDBM_H',None)],
-                                       libraries = ndbm_libs ) )
-            elif self.compiler.find_library_file(lib_dirs, 'gdbm'):
-                gdbm_libs = ['gdbm']
-                if self.compiler.find_library_file(lib_dirs, 'gdbm_compat'):
-                    gdbm_libs.append('gdbm_compat')
-                if find_file("gdbm/ndbm.h", inc_dirs, []) is not None:
-                    exts.append( Extension(
-                        'dbm', ['dbmmodule.c'],
-                        define_macros=[('HAVE_GDBM_NDBM_H',None)],
-                        libraries = gdbm_libs ) )
-                elif find_file("gdbm-ndbm.h", inc_dirs, []) is not None:
-                    exts.append( Extension(
-                        'dbm', ['dbmmodule.c'],
-                        define_macros=[('HAVE_GDBM_DASH_NDBM_H',None)],
-                        libraries = gdbm_libs ) )
-                else:
-                    missing.append('dbm')
-            elif db_incs is not None:
-                exts.append( Extension('dbm', ['dbmmodule.c'],
-                                       library_dirs=dblib_dir,
-                                       runtime_library_dirs=dblib_dir,
-                                       include_dirs=db_incs,
-                                       define_macros=[('HAVE_BERKDB_H',None),
-                                                      ('DB_DBM_HSEARCH',None)],
-                                       libraries=dblibs))
-            else:
-                missing.append('dbm')
-
-        # Anthony Baxter's gdbm module.  GNU dbm(3) will require -lgdbm:
-        if (self.compiler.find_library_file(lib_dirs, 'gdbm')):
-            exts.append( Extension('gdbm', ['gdbmmodule.c'],
-                                   libraries = ['gdbm'] ) )
-        else:
-            missing.append('gdbm')
-
-        # Unix-only modules
-        if platform not in ['mac', 'win32']:
-            # Steen Lumholt's termios module
-            exts.append( Extension('termios', ['termios.c']) )
-            # Jeremy Hylton's rlimit interface
-            if platform not in ['atheos']:
-                exts.append( Extension('resource', ['resource.c']) )
-            else:
-                missing.append('resource')
-
-            # Sun yellow pages. Some systems have the functions in libc.
-            if (platform not in ['cygwin', 'atheos', 'qnx6'] and
-                find_file('rpcsvc/yp_prot.h', inc_dirs, []) is not None):
-                if (self.compiler.find_library_file(lib_dirs, 'nsl')):
-                    libs = ['nsl']
-                else:
-                    libs = []
-                exts.append( Extension('nis', ['nismodule.c'],
-                                       libraries = libs) )
-            else:
-                missing.append('nis')
-        else:
-            missing.extend(['nis', 'resource', 'termios'])
-
-        # Curses support, requiring the System V version of curses, often
-        # provided by the ncurses library.
-        panel_library = 'panel'
-        if (self.compiler.find_library_file(lib_dirs, 'ncursesw')):
-            curses_libs = ['ncursesw']
-            # Bug 1464056: If _curses.so links with ncursesw,
-            # _curses_panel.so must link with panelw.
-            panel_library = 'panelw'
-            exts.append( Extension('_curses', ['_cursesmodule.c'],
-                                   libraries = curses_libs) )
-        elif (self.compiler.find_library_file(lib_dirs, 'ncurses')):
-            curses_libs = ['ncurses']
-            exts.append( Extension('_curses', ['_cursesmodule.c'],
-                                   libraries = curses_libs) )
-        elif (self.compiler.find_library_file(lib_dirs, 'curses')
-              and platform != 'darwin'):
-                # OSX has an old Berkeley curses, not good enough for
-                # the _curses module.
-            if (self.compiler.find_library_file(lib_dirs, 'terminfo')):
-                curses_libs = ['curses', 'terminfo']
-            elif (self.compiler.find_library_file(lib_dirs, 'termcap')):
-                curses_libs = ['curses', 'termcap']
-            else:
-                curses_libs = ['curses']
-
-            exts.append( Extension('_curses', ['_cursesmodule.c'],
-                                   libraries = curses_libs) )
-        else:
-            missing.append('_curses')
-
-        # If the curses module is enabled, check for the panel module
-        if (module_enabled(exts, '_curses') and
-            self.compiler.find_library_file(lib_dirs, panel_library)):
-            exts.append( Extension('_curses_panel', ['_curses_panel.c'],
-                                   libraries = [panel_library] + curses_libs) )
-        else:
-            missing.append('_curses_panel')
-
-        # Andrew Kuchling's zlib module.  Note that some versions of zlib
-        # 1.1.3 have security problems.  See CERT Advisory CA-2002-07:
-        # http://www.cert.org/advisories/CA-2002-07.html
-        #
-        # zlib 1.1.4 is fixed, but at least one vendor (RedHat) has decided to
-        # patch its zlib 1.1.3 package instead of upgrading to 1.1.4.  For
-        # now, we still accept 1.1.3, because we think it's difficult to
-        # exploit this in Python, and we'd rather make it RedHat's problem
-        # than our problem <wink>.
-        #
-        # You can upgrade zlib to version 1.1.4 yourself by going to
-        # http://www.gzip.org/zlib/
-        zlib_inc = find_file('zlib.h', [], inc_dirs)
-        have_zlib = False
-        if zlib_inc is not None:
-            zlib_h = zlib_inc[0] + '/zlib.h'
-            version = '"0.0.0"'
-            version_req = '"1.1.3"'
-            fp = open(zlib_h)
-            while 1:
-                line = fp.readline()
-                if not line:
-                    break
-                if line.startswith('#define ZLIB_VERSION'):
-                    version = line.split()[2]
-                    break
-            if version >= version_req:
-                if (self.compiler.find_library_file(lib_dirs, 'z')):
-                    if sys.platform == "darwin":
-                        zlib_extra_link_args = ('-Wl,-search_paths_first',)
-                    else:
-                        zlib_extra_link_args = ()
-                    exts.append( Extension('zlib', ['zlibmodule.c'],
-                                           libraries = ['z'],
-                                           extra_link_args = zlib_extra_link_args))
-                    have_zlib = True
-                else:
-                    missing.append('zlib')
-            else:
-                missing.append('zlib')
-        else:
-            missing.append('zlib')
-
-        # Helper module for various ascii-encoders.  Uses zlib for an optimized
-        # crc32 if we have it.  Otherwise binascii uses its own.
-        if have_zlib:
-            extra_compile_args = ['-DUSE_ZLIB_CRC32']
-            libraries = ['z']
-            extra_link_args = zlib_extra_link_args
-        else:
-            extra_compile_args = []
-            libraries = []
-            extra_link_args = []
-        exts.append( Extension('binascii', ['binascii.c'],
-                               extra_compile_args = extra_compile_args,
-                               libraries = libraries,
-                               extra_link_args = extra_link_args) )
-
-        # Gustavo Niemeyer's bz2 module.
-        if (self.compiler.find_library_file(lib_dirs, 'bz2')):
-            if sys.platform == "darwin":
-                bz2_extra_link_args = ('-Wl,-search_paths_first',)
-            else:
-                bz2_extra_link_args = ()
-            exts.append( Extension('bz2', ['bz2module.c'],
-                                   libraries = ['bz2'],
-                                   extra_link_args = bz2_extra_link_args) )
-        else:
-            missing.append('bz2')
-
-        # Interface to the Expat XML parser
-        #
-        # Expat was written by James Clark and is now maintained by a
-        # group of developers on SourceForge; see www.libexpat.org for
-        # more information.  The pyexpat module was written by Paul
-        # Prescod after a prototype by Jack Jansen.  The Expat source
-        # is included in Modules/expat/.  Usage of a system
-        # shared libexpat.so/expat.dll is not advised.
-        #
-        # More information on Expat can be found at www.libexpat.org.
-        #
-        expatinc = os.path.join(os.getcwd(), srcdir, 'Modules', 'expat')
-        define_macros = [
-            ('HAVE_EXPAT_CONFIG_H', '1'),
-        ]
-
-        exts.append(Extension('pyexpat',
-                              define_macros = define_macros,
-                              include_dirs = [expatinc],
-                              sources = ['pyexpat.c',
-                                         'expat/xmlparse.c',
-                                         'expat/xmlrole.c',
-                                         'expat/xmltok.c',
-                                         ],
-                              ))
-
-        # Fredrik Lundh's cElementTree module.  Note that this also
-        # uses expat (via the CAPI hook in pyexpat).
-
-        if os.path.isfile(os.path.join(srcdir, 'Modules', '_elementtree.c')):
-            define_macros.append(('USE_PYEXPAT_CAPI', None))
-            exts.append(Extension('_elementtree',
-                                  define_macros = define_macros,
-                                  include_dirs = [expatinc],
-                                  sources = ['_elementtree.c'],
-                                  ))
-        else:
-            missing.append('_elementtree')
-
-        # Hye-Shik Chang's CJKCodecs modules.
-        if have_unicode:
-            exts.append(Extension('_multibytecodec',
-                                  ['cjkcodecs/multibytecodec.c']))
-            for loc in ('kr', 'jp', 'cn', 'tw', 'hk', 'iso2022'):
-                exts.append(Extension('_codecs_%s' % loc,
-                                      ['cjkcodecs/_codecs_%s.c' % loc]))
-        else:
-            missing.append('_multibytecodec')
-            for loc in ('kr', 'jp', 'cn', 'tw', 'hk', 'iso2022'):
-                missing.append('_codecs_%s' % loc)
-
-        # Dynamic loading module
-        if sys.maxint == 0x7fffffff:
-            # This requires sizeof(int) == sizeof(long) == sizeof(char*)
-            dl_inc = find_file('dlfcn.h', [], inc_dirs)
-            if (dl_inc is not None) and (platform not in ['atheos']):
-                exts.append( Extension('dl', ['dlmodule.c']) )
-            else:
-                missing.append('dl')
-        else:
-            missing.append('dl')
-
-        # Thomas Heller's _ctypes module
-        self.detect_ctypes(inc_dirs, lib_dirs)
-
-        # Richard Oudkerk's multiprocessing module
-        if platform == 'win32':             # Windows
-            macros = dict()
-            libraries = ['ws2_32']
-
-        elif platform == 'darwin':          # Mac OSX
-            macros = dict(
-                HAVE_SEM_OPEN=1,
-                HAVE_SEM_TIMEDWAIT=0,
-                HAVE_FD_TRANSFER=1,
-                HAVE_BROKEN_SEM_GETVALUE=1
-                )
-            libraries = []
-
-        elif platform == 'cygwin':          # Cygwin
-            macros = dict(
-                HAVE_SEM_OPEN=1,
-                HAVE_SEM_TIMEDWAIT=1,
-                HAVE_FD_TRANSFER=0,
-                HAVE_BROKEN_SEM_UNLINK=1
-                )
-            libraries = []
-
-        elif platform in ('freebsd4', 'freebsd5', 'freebsd6', 'freebsd7', 'freebsd8'):
-            # FreeBSD's P1003.1b semaphore support is very experimental
-            # and has many known problems. (as of June 2008)
-            macros = dict(                  # FreeBSD
-                HAVE_SEM_OPEN=0,
-                HAVE_SEM_TIMEDWAIT=0,
-                HAVE_FD_TRANSFER=1,
-                )
-            libraries = []
-
-        elif platform.startswith('openbsd'):
-            macros = dict(                  # OpenBSD
-                HAVE_SEM_OPEN=0,            # Not implemented
-                HAVE_SEM_TIMEDWAIT=0,
-                HAVE_FD_TRANSFER=1,
-                )
-            libraries = []
-
-        elif platform.startswith('netbsd'):
-            macros = dict(                  # at least NetBSD 5
-                HAVE_SEM_OPEN=1,
-                HAVE_SEM_TIMEDWAIT=0,
-                HAVE_FD_TRANSFER=1,
-                HAVE_BROKEN_SEM_GETVALUE=1
-                )
-            libraries = []
-
-        else:                                   # Linux and other unices
-            macros = dict(
-                HAVE_SEM_OPEN=1,
-                HAVE_SEM_TIMEDWAIT=1,
-                HAVE_FD_TRANSFER=1
-                )
-            libraries = ['rt']
-
-        if platform == 'win32':
-            multiprocessing_srcs = [ '_multiprocessing/multiprocessing.c',
-                                     '_multiprocessing/semaphore.c',
-                                     '_multiprocessing/pipe_connection.c',
-                                     '_multiprocessing/socket_connection.c',
-                                     '_multiprocessing/win32_functions.c'
-                                   ]
-
-        else:
-            multiprocessing_srcs = [ '_multiprocessing/multiprocessing.c',
-                                     '_multiprocessing/socket_connection.c'
-                                   ]
-
-            if macros.get('HAVE_SEM_OPEN', False):
-                multiprocessing_srcs.append('_multiprocessing/semaphore.c')
-
-        if sysconfig.get_config_var('WITH_THREAD'):
-            exts.append ( Extension('_multiprocessing', multiprocessing_srcs,
-                                    define_macros=macros.items(),
-                                    include_dirs=["Modules/_multiprocessing"]))
-        else:
-            missing.append('_multiprocessing')
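-        # define_macros expects (name, value) pairs, which is exactly
-        # what dict.items() yields here, e.g.
-        #   >>> sorted(dict(HAVE_SEM_OPEN=1, HAVE_FD_TRANSFER=1).items())
-        #   [('HAVE_FD_TRANSFER', 1), ('HAVE_SEM_OPEN', 1)]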
-
-        # End multiprocessing
-
-
-        # Platform-specific libraries
-        if platform == 'linux2':
-            # Linux-specific modules
-            exts.append( Extension('linuxaudiodev', ['linuxaudiodev.c']) )
-        else:
-            missing.append('linuxaudiodev')
-
-        if platform in ('linux2', 'freebsd4', 'freebsd5', 'freebsd6',
-                        'freebsd7', 'freebsd8'):
-            exts.append( Extension('ossaudiodev', ['ossaudiodev.c']) )
-        else:
-            missing.append('ossaudiodev')
-
-        if platform == 'sunos5':
-            # SunOS specific modules
-            exts.append( Extension('sunaudiodev', ['sunaudiodev.c']) )
-        else:
-            missing.append('sunaudiodev')
-
-        if platform == 'darwin':
-            # _scproxy
-            exts.append(Extension("_scproxy", [os.path.join(srcdir, "Mac/Modules/_scproxy.c")],
-                extra_link_args= [
-                    '-framework', 'SystemConfiguration',
-                    '-framework', 'CoreFoundation'
-                ]))
-
-
-        if platform == 'darwin' and ("--disable-toolbox-glue" not in
-                sysconfig.get_config_var("CONFIG_ARGS")):
-
-            if int(os.uname()[2].split('.')[0]) >= 8:
-                # We're on Mac OS X 10.4 or later, the compiler should
-                # support '-Wno-deprecated-declarations'. This will
-                # suppress deprecation warnings for the Carbon extensions,
-                # which wrap the Carbon APIs, including the parts that are
-                # deprecated.
-                carbon_extra_compile_args = ['-Wno-deprecated-declarations']
-            else:
-                carbon_extra_compile_args = []
-
-            # Mac OS X specific modules.
-            def macSrcExists(name1, name2=''):
-                if not name1:
-                    return None
-                names = (name1,)
-                if name2:
-                    names = (name1, name2)
-                path = os.path.join(srcdir, 'Mac', 'Modules', *names)
-                return os.path.exists(path)
-
-            def addMacExtension(name, kwds, extra_srcs=[]):
-                dirname = ''
-                if name[0] == '_':
-                    dirname = name[1:].lower()
-                cname = name + '.c'
-                cmodulename = name + 'module.c'
-                # Check for NNN.c, NNNmodule.c, _nnn/NNN.c, _nnn/NNNmodule.c
-                if macSrcExists(cname):
-                    srcs = [cname]
-                elif macSrcExists(cmodulename):
-                    srcs = [cmodulename]
-                elif macSrcExists(dirname, cname):
-                    # XXX(nnorwitz): If all the names ended with module, we
-                    # wouldn't need this condition.  ibcarbon is the only one.
-                    srcs = [os.path.join(dirname, cname)]
-                elif macSrcExists(dirname, cmodulename):
-                    srcs = [os.path.join(dirname, cmodulename)]
-                else:
-                    raise RuntimeError("%s not found" % name)
-
-                # Here's the whole point:  add the extension with sources
-                exts.append(Extension(name, srcs + extra_srcs, **kwds))
-
-            # Core Foundation
-            core_kwds = {'extra_compile_args': carbon_extra_compile_args,
-                         'extra_link_args': ['-framework', 'CoreFoundation'],
-                        }
-            addMacExtension('_CF', core_kwds, ['cf/pycfbridge.c'])
-            addMacExtension('autoGIL', core_kwds)
-
-
-
-            # Carbon
-            carbon_kwds = {'extra_compile_args': carbon_extra_compile_args,
-                           'extra_link_args': ['-framework', 'Carbon'],
-                          }
-            CARBON_EXTS = ['ColorPicker', 'gestalt', 'MacOS', 'Nav',
-                           'OSATerminology', 'icglue',
-                           # All these are in subdirs
-                           '_AE', '_AH', '_App', '_CarbonEvt', '_Cm', '_Ctl',
-                           '_Dlg', '_Drag', '_Evt', '_File', '_Folder', '_Fm',
-                           '_Help', '_Icn', '_IBCarbon', '_List',
-                           '_Menu', '_Mlte', '_OSA', '_Res', '_Qd', '_Qdoffs',
-                           '_Scrap', '_Snd', '_TE',
-                          ]
-            for name in CARBON_EXTS:
-                addMacExtension(name, carbon_kwds)
-
-            # Workaround for a bug in the version of gcc shipped with Xcode 3.
-            # The _Win extension should build just like the other Carbon extensions, but
-            # this actually results in a hard crash of the linker.
-            #
-            if '-arch ppc64' in cflags and '-arch ppc' in cflags:
-                win_kwds = {'extra_compile_args': carbon_extra_compile_args + ['-arch', 'i386', '-arch', 'ppc'],
-                               'extra_link_args': ['-framework', 'Carbon', '-arch', 'i386', '-arch', 'ppc'],
-                           }
-                addMacExtension('_Win', win_kwds)
-            else:
-                addMacExtension('_Win', carbon_kwds)
-
-
-            # Application Services & QuickTime
-            app_kwds = {'extra_compile_args': carbon_extra_compile_args,
-                        'extra_link_args': ['-framework','ApplicationServices'],
-                       }
-            addMacExtension('_Launch', app_kwds)
-            addMacExtension('_CG', app_kwds)
-
-            exts.append( Extension('_Qt', ['qt/_Qtmodule.c'],
-                        extra_compile_args=carbon_extra_compile_args,
-                        extra_link_args=['-framework', 'QuickTime',
-                                     '-framework', 'Carbon']) )
-
-
-        self.extensions.extend(exts)
-
-        # Call the method for detecting whether _tkinter can be compiled
-        self.detect_tkinter(inc_dirs, lib_dirs)
-
-        if '_tkinter' not in [e.name for e in self.extensions]:
-            missing.append('_tkinter')
-
-        return missing
-
-    def detect_tkinter_darwin(self, inc_dirs, lib_dirs):
-        # The _tkinter module, using frameworks. Since frameworks are quite
-        # different the UNIX search logic is not sharable.
-        from os.path import join, exists
-        framework_dirs = [
-            '/Library/Frameworks',
-            '/System/Library/Frameworks/',
-            join(os.getenv('HOME'), 'Library/Frameworks')
-        ]
-
-        # Find the directory that contains the Tcl.framework and Tk.framework
-        # bundles.
-        # XXX distutils should support -F!
-        for F in framework_dirs:
-            # both Tcl.framework and Tk.framework should be present
-            for fw in 'Tcl', 'Tk':
-                if not exists(join(F, fw + '.framework')):
-                    break
-            else:
-                # ok, F is now the directory with both frameworks. Continue
-                # building.
-                break
-        else:
-            # Tk and Tcl frameworks not found. Normal "unix" tkinter search
-            # will now resume.
-            return 0
-
-        # For 8.4a2, we must add -I options that point inside the Tcl and Tk
-        # frameworks. In later release we should hopefully be able to pass
-        # the -F option to gcc, which specifies a framework lookup path.
-        #
-        include_dirs = [
-            join(F, fw + '.framework', H)
-            for fw in 'Tcl', 'Tk'
-            for H in 'Headers', 'Versions/Current/PrivateHeaders'
-        ]
-
-        # For 8.4a2, the X11 headers are not included. Rather than include a
-        # complicated search, this is a hard-coded path. It could bail out
-        # if X11 libs are not found...
-        include_dirs.append('/usr/X11R6/include')
-        frameworks = ['-framework', 'Tcl', '-framework', 'Tk']
-
-        # None of the existing framework builds of Tcl/Tk support 64-bit
-        # architectures.
-        cflags = sysconfig.get_config_vars('CFLAGS')[0]
-        archs = re.findall(r'-arch\s+(\w+)', cflags)
-        fp = os.popen("file %s/Tk.framework/Tk | grep 'for architecture'"%(F,))
-        detected_archs = []
-        for ln in fp:
-            a = ln.split()[-1]
-            if a in archs:
-                detected_archs.append(ln.split()[-1])
-        fp.close()
-
-        for a in detected_archs:
-            frameworks.append('-arch')
-            frameworks.append(a)
-
-        ext = Extension('_tkinter', ['_tkinter.c', 'tkappinit.c'],
-                        define_macros=[('WITH_APPINIT', 1)],
-                        include_dirs = include_dirs,
-                        libraries = [],
-                        extra_compile_args = frameworks[2:],
-                        extra_link_args = frameworks,
-                        )
-        self.extensions.append(ext)
-        return 1
-
-
-    def detect_tkinter(self, inc_dirs, lib_dirs):
-        # The _tkinter module.
-
-        # Rather than complicate the code below, detecting and building
-        # AquaTk is a separate method. Only one Tkinter will be built on
-        # Darwin - either AquaTk, if it is found, or X11 based Tk.
-        platform = self.get_platform()
-        if (platform == 'darwin' and
-            self.detect_tkinter_darwin(inc_dirs, lib_dirs)):
-            return
-
-        # Assume we haven't found any of the libraries or include files
-        # The versions with dots are used on Unix, and the versions without
-        # dots on Windows, for detection by cygwin.
-        tcllib = tklib = tcl_includes = tk_includes = None
-        for version in ['8.4', '84']:
-            tklib = self.compiler.find_library_file(lib_dirs, 'tk' + version)
-            tcllib = self.compiler.find_library_file(lib_dirs, 'tcl' + version)
-            if tklib and tcllib:
-                # Exit the loop when we've found the Tcl/Tk libraries
-                break
-
-        # Now check for the header files
-        if tklib and tcllib:
-            # Check for the include files on Debian and {Free,Open}BSD, where
-            # they're put in /usr/include/{tcl,tk}X.Y
-            dotversion = version
-            if '.' not in dotversion and "bsd" in sys.platform.lower():
-                # OpenBSD and FreeBSD use Tcl/Tk library names like libtcl83.a,
-                # but the include subdirs are named like .../include/tcl8.3.
-                dotversion = dotversion[:-1] + '.' + dotversion[-1]
-            tcl_include_sub = []
-            tk_include_sub = []
-            for dir in inc_dirs:
-                tcl_include_sub += [dir + os.sep + "tcl" + dotversion]
-                tk_include_sub += [dir + os.sep + "tk" + dotversion]
-            tk_include_sub += tcl_include_sub
-            tcl_includes = find_file('tcl.h', inc_dirs, tcl_include_sub)
-            tk_includes = find_file('tk.h', inc_dirs, tk_include_sub)
-
-        if (tcllib is None or tklib is None or
-            tcl_includes is None or tk_includes is None):
-            self.announce("INFO: Can't locate Tcl/Tk libs and/or headers", 2)
-            return
-
-        # OK... everything seems to be present for Tcl/Tk.
-
-        include_dirs = [] ; libs = [] ; defs = [] ; added_lib_dirs = []
-        for dir in tcl_includes + tk_includes:
-            if dir not in include_dirs:
-                include_dirs.append(dir)
-
-        # Check for various platform-specific directories
-        if platform == 'sunos5':
-            include_dirs.append('/usr/openwin/include')
-            added_lib_dirs.append('/usr/openwin/lib')
-        elif os.path.exists('/usr/X11R6/include'):
-            include_dirs.append('/usr/X11R6/include')
-            added_lib_dirs.append('/usr/X11R6/lib64')
-            added_lib_dirs.append('/usr/X11R6/lib')
-        elif os.path.exists('/usr/X11R5/include'):
-            include_dirs.append('/usr/X11R5/include')
-            added_lib_dirs.append('/usr/X11R5/lib')
-        else:
-            # Assume default location for X11
-            include_dirs.append('/usr/X11/include')
-            added_lib_dirs.append('/usr/X11/lib')
-
-        # If Cygwin, then verify that X is installed before proceeding
-        if platform == 'cygwin':
-            x11_inc = find_file('X11/Xlib.h', [], include_dirs)
-            if x11_inc is None:
-                return
-
-        # Check for BLT extension
-        if self.compiler.find_library_file(lib_dirs + added_lib_dirs,
-                                           'BLT8.0'):
-            defs.append( ('WITH_BLT', 1) )
-            libs.append('BLT8.0')
-        elif self.compiler.find_library_file(lib_dirs + added_lib_dirs,
-                                           'BLT'):
-            defs.append( ('WITH_BLT', 1) )
-            libs.append('BLT')
-
-        # Add the Tcl/Tk libraries
-        libs.append('tk'+ version)
-        libs.append('tcl'+ version)
-
-        if platform in ['aix3', 'aix4']:
-            libs.append('ld')
-
-        # Finally, link with the X11 libraries (not appropriate on cygwin)
-        if platform != "cygwin":
-            libs.append('X11')
-
-        ext = Extension('_tkinter', ['_tkinter.c', 'tkappinit.c'],
-                        define_macros=[('WITH_APPINIT', 1)] + defs,
-                        include_dirs = include_dirs,
-                        libraries = libs,
-                        library_dirs = added_lib_dirs,
-                        )
-        self.extensions.append(ext)
-
-##         # Uncomment these lines if you want to play with xxmodule.c
-##         ext = Extension('xx', ['xxmodule.c'])
-##         self.extensions.append(ext)
-
-        # XXX handle these, but how to detect?
-        # *** Uncomment and edit for PIL (TkImaging) extension only:
-        #       -DWITH_PIL -I../Extensions/Imaging/libImaging  tkImaging.c \
-        # *** Uncomment and edit for TOGL extension only:
-        #       -DWITH_TOGL togl.c \
-        # *** Uncomment these for TOGL extension only:
-        #       -lGL -lGLU -lXext -lXmu \
-
-    def configure_ctypes_darwin(self, ext):
-        # Darwin (OS X) uses preconfigured files, in
-        # the Modules/_ctypes/libffi_osx directory.
-        (srcdir,) = sysconfig.get_config_vars('srcdir')
-        ffi_srcdir = os.path.abspath(os.path.join(srcdir, 'Modules',
-                                                  '_ctypes', 'libffi_osx'))
-        sources = [os.path.join(ffi_srcdir, p)
-                   for p in ['ffi.c',
-                             'x86/darwin64.S',
-                             'x86/x86-darwin.S',
-                             'x86/x86-ffi_darwin.c',
-                             'x86/x86-ffi64.c',
-                             'powerpc/ppc-darwin.S',
-                             'powerpc/ppc-darwin_closure.S',
-                             'powerpc/ppc-ffi_darwin.c',
-                             'powerpc/ppc64-darwin_closure.S',
-                             ]]
-
-        # Add .S (preprocessed assembly) to C compiler source extensions.
-        self.compiler.src_extensions.append('.S')
-
-        include_dirs = [os.path.join(ffi_srcdir, 'include'),
-                        os.path.join(ffi_srcdir, 'powerpc')]
-        ext.include_dirs.extend(include_dirs)
-        ext.sources.extend(sources)
-        return True
-
-    def configure_ctypes(self, ext):
-        if not self.use_system_libffi:
-            if sys.platform == 'darwin':
-                return self.configure_ctypes_darwin(ext)
-
-            (srcdir,) = sysconfig.get_config_vars('srcdir')
-            ffi_builddir = os.path.join(self.build_temp, 'libffi')
-            ffi_srcdir = os.path.abspath(os.path.join(srcdir, 'Modules',
-                                         '_ctypes', 'libffi'))
-            ffi_configfile = os.path.join(ffi_builddir, 'fficonfig.py')
-
-            from distutils.dep_util import newer_group
-
-            config_sources = [os.path.join(ffi_srcdir, fname)
-                              for fname in os.listdir(ffi_srcdir)
-                              if os.path.isfile(os.path.join(ffi_srcdir, fname))]
-            if self.force or newer_group(config_sources,
-                                         ffi_configfile):
-                from distutils.dir_util import mkpath
-                mkpath(ffi_builddir)
-                config_args = []
-
-                # Pass empty CFLAGS because we'll just append the resulting
-                # CFLAGS to Python's; -g or -O2 is to be avoided.
-                cmd = "cd %s && env CFLAGS='' '%s/configure' %s" \
-                      % (ffi_builddir, ffi_srcdir, " ".join(config_args))
-
-                res = os.system(cmd)
-                if res or not os.path.exists(ffi_configfile):
-                    print "Failed to configure _ctypes module"
-                    return False
-
-            fficonfig = {}
-            exec open(ffi_configfile) in fficonfig
-
-            # Add .S (preprocessed assembly) to C compiler source extensions.
-            self.compiler.src_extensions.append('.S')
-
-            include_dirs = [os.path.join(ffi_builddir, 'include'),
-                            ffi_builddir,
-                            os.path.join(ffi_srcdir, 'src')]
-            extra_compile_args = fficonfig['ffi_cflags'].split()
-
-            ext.sources.extend(os.path.join(ffi_srcdir, f) for f in
-                               fficonfig['ffi_sources'])
-            ext.include_dirs.extend(include_dirs)
-            ext.extra_compile_args.extend(extra_compile_args)
-        return True
-
-    def detect_ctypes(self, inc_dirs, lib_dirs):
-        self.use_system_libffi = False
-        include_dirs = []
-        extra_compile_args = []
-        extra_link_args = []
-        sources = ['_ctypes/_ctypes.c',
-                   '_ctypes/callbacks.c',
-                   '_ctypes/callproc.c',
-                   '_ctypes/stgdict.c',
-                   '_ctypes/cfield.c',
-                   '_ctypes/malloc_closure.c']
-        depends = ['_ctypes/ctypes.h']
-
-        if sys.platform == 'darwin':
-            sources.append('_ctypes/darwin/dlfcn_simple.c')
-            extra_compile_args.append('-DMACOSX')
-            include_dirs.append('_ctypes/darwin')
-# XXX Is this still needed?
-##            extra_link_args.extend(['-read_only_relocs', 'warning'])
-
-        elif sys.platform == 'sunos5':
-            # XXX This shouldn't be necessary; it appears that some
-            # of the assembler code is non-PIC (i.e. it has relocations
-            # when it shouldn't). The proper fix would be to rewrite
-            # the assembler code to be PIC.
-            # This only works with GCC; the Sun compiler likely refuses
-            # this option. If you want to compile ctypes with the Sun
-            # compiler, please research a proper solution, instead of
-            # finding some -z option for the Sun compiler.
-            extra_link_args.append('-mimpure-text')
-
-        elif sys.platform.startswith('hp-ux'):
-            extra_link_args.append('-fPIC')
-
-        ext = Extension('_ctypes',
-                        include_dirs=include_dirs,
-                        extra_compile_args=extra_compile_args,
-                        extra_link_args=extra_link_args,
-                        libraries=[],
-                        sources=sources,
-                        depends=depends)
-        ext_test = Extension('_ctypes_test',
-                             sources=['_ctypes/_ctypes_test.c'])
-        self.extensions.extend([ext, ext_test])
-
-        if '--with-system-ffi' not in sysconfig.get_config_var("CONFIG_ARGS"):
-            return
-
-        if sys.platform == 'darwin':
-            # OS X 10.5 comes with libffi.dylib; the include files are
-            # in /usr/include/ffi
-            inc_dirs.append('/usr/include/ffi')
-
-        ffi_inc = find_file('ffi.h', [], inc_dirs)
-        if ffi_inc is not None:
-            ffi_h = ffi_inc[0] + '/ffi.h'
-            fp = open(ffi_h)
-            while 1:
-                line = fp.readline()
-                if not line:
-                    ffi_inc = None
-                    break
-                if line.startswith('#define LIBFFI_H'):
-                    break
-        ffi_lib = None
-        if ffi_inc is not None:
-            for lib_name in ('ffi_convenience', 'ffi_pic', 'ffi'):
-                if (self.compiler.find_library_file(lib_dirs, lib_name)):
-                    ffi_lib = lib_name
-                    break
-
-        if ffi_inc and ffi_lib:
-            ext.include_dirs.extend(ffi_inc)
-            ext.libraries.append(ffi_lib)
-            self.use_system_libffi = True
-
-
-class PyBuildInstall(install):
-    # Suppress the warning about installation into the lib_dynload
-    # directory, which is not in sys.path when running Python during
-    # installation:
-    def initialize_options (self):
-        install.initialize_options(self)
-        self.warn_dir=0
-
-class PyBuildInstallLib(install_lib):
-    # Do exactly what install_lib does, but make sure correct access modes get
-    # set on installed directories and files. All installed files will get
-    # mode 644 unless they are a shared library, in which case they will get
-    # mode 755. All installed directories will get mode 755.
-
-    so_ext = sysconfig.get_config_var("SO")
-
-    def install(self):
-        outfiles = install_lib.install(self)
-        self.set_file_modes(outfiles, 0644, 0755)
-        self.set_dir_modes(self.install_dir, 0755)
-        return outfiles
-
-    def set_file_modes(self, files, defaultMode, sharedLibMode):
-        if not self.is_chmod_supported(): return
-        if not files: return
-
-        for filename in files:
-            if os.path.islink(filename): continue
-            mode = defaultMode
-            if filename.endswith(self.so_ext): mode = sharedLibMode
-            log.info("changing mode of %s to %o", filename, mode)
-            if not self.dry_run: os.chmod(filename, mode)
-
-    def set_dir_modes(self, dirname, mode):
-        if not self.is_chmod_supported(): return
-        os.path.walk(dirname, self.set_dir_modes_visitor, mode)
-
-    def set_dir_modes_visitor(self, mode, dirname, names):
-        if os.path.islink(dirname): return
-        log.info("changing mode of %s to %o", dirname, mode)
-        if not self.dry_run: os.chmod(dirname, mode)
-
-    def is_chmod_supported(self):
-        return hasattr(os, 'chmod')
-
-SUMMARY = """
-Python is an interpreted, interactive, object-oriented programming
-language. It is often compared to Tcl, Perl, Scheme or Java.
-
-Python combines remarkable power with very clear syntax. It has
-modules, classes, exceptions, very high level dynamic data types, and
-dynamic typing. There are interfaces to many system calls and
-libraries, as well as to various windowing systems (X11, Motif, Tk,
-Mac, MFC). New built-in modules are easily written in C or C++. Python
-is also usable as an extension language for applications that need a
-programmable interface.
-
-The Python implementation is portable: it runs on many brands of UNIX,
-on Windows, DOS, OS/2, Mac, Amiga... If your favorite system isn't
-listed here, it may still be supported, if there's a C compiler for
-it. Ask around on comp.lang.python -- or just try compiling Python
-yourself.
-"""
-
-CLASSIFIERS = """
-Development Status :: 6 - Mature
-License :: OSI Approved :: Python Software Foundation License
-Natural Language :: English
-Programming Language :: C
-Programming Language :: Python
-Topic :: Software Development
-"""
-
-def main():
-    # turn off warnings when deprecated modules are imported
-    import warnings
-    warnings.filterwarnings("ignore",category=DeprecationWarning)
-    setup(# PyPI Metadata (PEP 301)
-          name = "Python",
-          version = sys.version.split()[0],
-          url = "http://www.python.org/%s" % sys.version[:3],
-          maintainer = "Guido van Rossum and the Python community",
-          maintainer_email = "python-dev@python.org",
-          description = "A high-level object-oriented programming language",
-          long_description = SUMMARY.strip(),
-          license = "PSF license",
-          classifiers = filter(None, CLASSIFIERS.split("\n")),
-          platforms = ["Many"],
-
-          # Build info
-          cmdclass = {'build_ext':PyBuildExt, 'install':PyBuildInstall,
-                      'install_lib':PyBuildInstallLib},
-          # The struct module is defined here, because build_ext won't be
-          # called unless there's at least one extension module defined.
-          ext_modules=[Extension('_struct', ['_struct.c'])],
-
-          # Scripts to install
-          scripts = ['Tools/scripts/pydoc', 'Tools/scripts/idle',
-                     'Tools/scripts/2to3',
-                     'Lib/smtpd.py']
-        )
-
-# --install-platlib
-if __name__ == '__main__':
-    main()
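
For readers skimming this deletion: the mode logic in `PyBuildInstallLib` above reduces to a one-line decision per file. A minimal standalone sketch of that rule, assuming a plain `.so` suffix and hypothetical file names (the real class pulls the suffix from `sysconfig.get_config_var("SO")`):

```python
import os  # unused here, kept to mirror the original module's imports

SO_EXT = ".so"  # assumption: the deleted code reads this from sysconfig

def pick_mode(filename, default_mode=0o644, shared_lib_mode=0o755):
    # Shared libraries need the executable bit; everything else is plain data.
    if filename.endswith(SO_EXT):
        return shared_lib_mode
    return default_mode

for name in ("README.txt", "_struct" + SO_EXT):  # hypothetical names
    print("%s -> %s" % (name, oct(pick_mode(name))))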
diff --git a/CMake/cdat_modules_extra/pyzmq_configure_step.cmake.in b/CMake/cdat_modules_extra/pyzmq_configure_step.cmake.in
deleted file mode 100644
index 96a849a05669312679af408ed96b1102fdb297a7..0000000000000000000000000000000000000000
--- a/CMake/cdat_modules_extra/pyzmq_configure_step.cmake.in
+++ /dev/null
@@ -1,20 +0,0 @@
-message("Building PyZMQ:\n@pyzmq_binary@")
-include(@cdat_CMAKE_BINARY_DIR@/cdat_common_environment.cmake)
-set(ENV{@LIBRARY_PATH@} "@CMAKE_INSTALL_PREFIX@/lib:@cdat_EXTERNALS@/lib:$ENV{@LIBRARY_PATH@}")
-set(ENV{VS_UNICODE_OUTPUT} "")
-if(APPLE)
-    set(ENV{CFLAGS} "@cdat_osx_arch_flag@ @cdat_osx_version_flag@ @cdat_osx_sysroot@")
-endif()
-
-execute_process(
-  COMMAND env  @LIBRARY_PATH@="$ENV{LD_LIBRARY_PATH}" "@PYTHON_EXECUTABLE@" setup.py configure --zmq=@cdat_EXTERNALS@
-  WORKING_DIRECTORY "@pyzmq_binary@"
-  RESULT_VARIABLE res
-  OUTPUT_VARIABLE pyzmq_OUT
-  ERROR_VARIABLE pyzmq_ERR)
-
-if(NOT ${res} EQUAL 0)
-    message("PyZMQ errors detected: \n${pyzmq_OUT}\n${pyzmq_ERR}")
-  message(FATAL_ERROR "Error in config of pyzmq")
-endif()
-message("pyzmq build worked.")
diff --git a/CMake/cdat_modules_extra/pyzmq_install_step.cmake.in b/CMake/cdat_modules_extra/pyzmq_install_step.cmake.in
deleted file mode 100644
index da21d89c1e1de69b1fd1fd472c37d3612477c4a3..0000000000000000000000000000000000000000
--- a/CMake/cdat_modules_extra/pyzmq_install_step.cmake.in
+++ /dev/null
@@ -1,21 +0,0 @@
-message("Installing pyzmq:\n@pyzmq_PREFIX_ARGS@")
-set(ENV{@LIBRARY_PATH@} "@CMAKE_INSTALL_PREFIX@/lib:@cdat_EXTERNALS@/lib:$ENV{@LIBRARY_PATH@}")
-set(ENV{VS_UNICODE_OUTPUT} "")
-
-if(APPLE)
-    set(ENV{CFLAGS} "@cdat_osx_arch_flag@ @cdat_osx_version_flag@ @cdat_osx_sysroot@")
-endif()
-
-execute_process(
-  COMMAND env  @LIBRARY_PATH@="$ENV{LD_LIBRARY_PATH}" "@PYTHON_EXECUTABLE@" setup.py install --zmq=@cdat_EXTERNALS@ --prefix=@PYTHON_SITE_PACKAGES_PREFIX@
-  WORKING_DIRECTORY "@pyzmq_binary@"
-  RESULT_VARIABLE res
-  OUTPUT_VARIABLE pyzmq_OUT
-  ERROR_VARIABLE pyzmq_ERR
-)
-
-if(NOT ${res} EQUAL 0)
-  message("pyzmq Errors detected: \n${pyzmq_OUT}\n${pyzmq_ERR}")
-  message(FATAL_ERROR "Error in config of pyzmq")
-endif()
-message("pyzmq install succeeded.")
diff --git a/CMake/cdat_modules_extra/reset_runtime.csh.in b/CMake/cdat_modules_extra/reset_runtime.csh.in
deleted file mode 100644
index c4ed68faac1e0f0c32b772030ac8e53b46ed33a6..0000000000000000000000000000000000000000
--- a/CMake/cdat_modules_extra/reset_runtime.csh.in
+++ /dev/null
@@ -1,24 +0,0 @@
-# First of all reset variables
-foreach v ( PATH LD_LIBRARY_PATH DYLD_LIBRARY_PATH PYTHONPATH prompt )
-    set tmp="UVCDAT_ORIGINAL_"${v}
-    if ( `eval echo \$\?$tmp` ) then
-        echo ${v}" env variable reset"
-        set vv=`eval echo \$$tmp`
-        setenv ${v} ${vv}
-        unsetenv ${tmp}
-    endif
-end
-
-# Now handle variables whose values we may have changed or created
-foreach v ( OPAL_PREFIX LIBOVERLAY_SCROLLBAR )
-    set tmp="UVCDAT_ORIGINAL_"${v}
-    if ( `eval echo \$\?$tmp` ) then
-        echo ${v}" env variable reset"
-        set vv=`eval echo \$$tmp`
-        setenv ${v} ${vv}
-        unsetenv ${tmp}
-    else
-        unsetenv ${v}
-    endif
-end
-unsetenv UVCDAT_PROMPT_STRING
-unsetenv UVCDAT_SETUP_PATH
diff --git a/CMake/cdat_modules_extra/reset_runtime.sh.in b/CMake/cdat_modules_extra/reset_runtime.sh.in
deleted file mode 100644
index 37f9577278c32813b81c4a0b296aaf0ea12adb40..0000000000000000000000000000000000000000
--- a/CMake/cdat_modules_extra/reset_runtime.sh.in
+++ /dev/null
@@ -1,16 +0,0 @@
-# First of all reset variables
-for v in PATH LD_LIBRARY_PATH DYLD_LIBRARY_PATH PYTHONPATH PS1 OPAL_PREFIX LIBOVERLAY_SCROLLBAR ; do
-   tmp="UVCDAT_ORIGINAL_"${v}
-   if [ -n "${!tmp}" ] ; then
-        echo ${v}" env variable reset"
-        if [ "${!tmp}" != " " ] ; then
-            export ${v}=${!tmp}
-        else
-            unset ${v}
-        fi
-        unset ${tmp}
-    fi
-done
-unset UVCDAT_PROMPT_STRING
-unset UVCDAT_SETUP_PATH
-return 0
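
The sh and csh reset scripts implement the same save/restore protocol: each managed variable V is shadowed by UVCDAT_ORIGINAL_V, with a lone space serving as the "was unset" sentinel. A minimal Python sketch of that protocol (the prefix and sentinel are taken from the scripts; the usage lines are illustrative):

```python
import os

SENTINEL = " "  # the scripts store a single space to mean "was not set"
PREFIX = "UVCDAT_ORIGINAL_"

def save(var):
    os.environ[PREFIX + var] = os.environ.get(var, SENTINEL)

def reset(var):
    saved = os.environ.pop(PREFIX + var, None)
    if saved is None:
        return  # nothing was saved, so nothing to restore
    if saved == SENTINEL:
        os.environ.pop(var, None)  # variable did not exist before setup
    else:
        os.environ[var] = saved

save("PYTHONPATH")
os.environ["PYTHONPATH"] = "/opt/uvcdat/lib"  # hypothetical tweak by setup
reset("PYTHONPATH")
```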
diff --git a/CMake/cdat_modules_extra/runpytest.in b/CMake/cdat_modules_extra/runpytest.in
deleted file mode 100755
index 42fe55e682dce19bd692230176eab84d1e4cd42a..0000000000000000000000000000000000000000
--- a/CMake/cdat_modules_extra/runpytest.in
+++ /dev/null
@@ -1,26 +0,0 @@
-#!/bin/bash
-
-# Glue script to play back a recorded uvcdat vistrail and compare the result
-# with a known-good baseline image.
-# Takes six positional arguments: $1-$3 are handed to uvcdat.py (the
-# vistrail.vt:tagname to play back, the -b argument, and a set of aliases for
-# that trail, e.g. to replace filenames); $4 is the produced image name, $5
-# the baseline image directory, and $6 the comparison threshold for
-# checkimage.py.
-
-# setup uvcdat run time environment
-. @CMAKE_INSTALL_PREFIX@/bin/setup_runtime.sh
-
-# play back the requested vistrail and make an image
-"@PYTHON_EXECUTABLE@" \
-  @CMAKE_INSTALL_PREFIX@/vistrails/vistrails/uvcdat.py \
- -R \
- -S $1\
- -b $2\
- -a $3\
- -e @CMAKE_BINARY_DIR@/Testing/Temporary
-
-# compare that image with the baseline(s) for it
-"@PYTHON_EXECUTABLE@" \
- @cdat_SOURCE_DIR@/testing/checkimage.py \
- @CMAKE_BINARY_DIR@/Testing/Temporary/$4 \
- $5/$4 \
- $6
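
runpytest is pure glue: stage one replays a vistrail to produce an image under Testing/Temporary, stage two hands that image, its baseline, and a threshold to checkimage.py. A hedged Python rendering of the same wiring; the argument layout is read off the script above, while the default paths are hypothetical stand-ins for the @CMAKE_...@ substitutions:

```python
import os
import subprocess
import sys

def run_pytest(vistrail, batch, aliases, image, baseline_dir, threshold,
               prefix="/usr/local/uvcdat", tmp="/tmp/Testing/Temporary"):
    # Stage 1: play back the vistrail and write an image into tmp.
    playback = [sys.executable,
                os.path.join(prefix, "vistrails/vistrails/uvcdat.py"),
                "-R", "-S", vistrail, "-b", batch, "-a", aliases, "-e", tmp]
    # Stage 2: compare the produced image against its baseline.
    compare = [sys.executable, "testing/checkimage.py",
               os.path.join(tmp, image),
               os.path.join(baseline_dir, image),
               threshold]
    for argv in (playback, compare):
        subprocess.check_call(argv)  # raises on any non-zero exit
```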
diff --git a/CMake/cdat_modules_extra/runtest.in b/CMake/cdat_modules_extra/runtest.in
index 972a674adb9032364be2eb5b127fe41b12b069bd..4946cf488e051034b7cc06bb2492f9504d9a5eca 100755
--- a/CMake/cdat_modules_extra/runtest.in
+++ b/CMake/cdat_modules_extra/runtest.in
@@ -1,4 +1,9 @@
 #!/bin/bash
-# source is not portable whereas . is
-. "@CMAKE_INSTALL_PREFIX@/bin/setup_runtime.sh"
-$@
+echo "ACTIVATING ENV:"@CONDA_ENVIRONMENT_NAME@
+source activate @CONDA_ENVIRONMENT_NAME@
+export DYLD_FALLBACK_LIBRARY_PATH=`python -c "import sys,os;print os.path.join(sys.prefix,'lib')"`
+echo "Python:" `which python`
+echo "Running: "$*
+python -c "import vtk;print 'VTK_VERSION:',vtk.VTK_VERSION"
+python -c "import numpy;print 'NUMPY_VERSION:',numpy.version.version"
+"$@"
diff --git a/CMake/cdat_modules_extra/seawater_build_step.cmake.in b/CMake/cdat_modules_extra/seawater_build_step.cmake.in
deleted file mode 100644
index 7118a8eb257c93b4634a431a1d117a6e5ae5235d..0000000000000000000000000000000000000000
--- a/CMake/cdat_modules_extra/seawater_build_step.cmake.in
+++ /dev/null
@@ -1,6 +0,0 @@
-include(@cdat_CMAKE_BINARY_DIR@/cdat_common_environment.cmake)
-
-execute_process(
-  COMMAND "@PYTHON_EXECUTABLE@" setup.py install @PYTHON_EXTRA_PREFIX@
-  WORKING_DIRECTORY "@seawater_source_dir@"
-)
diff --git a/CMake/cdat_modules_extra/setup_runtime.csh.in b/CMake/cdat_modules_extra/setup_runtime.csh.in
deleted file mode 100755
index 8a7f1c83b0345dda07bab9622a57b5b13a1aa787..0000000000000000000000000000000000000000
--- a/CMake/cdat_modules_extra/setup_runtime.csh.in
+++ /dev/null
@@ -1,117 +0,0 @@
-# Main install prefix set by user or post install script:
-# UVCDAT_INSTALL_PREFIX
-
-# First reset any existing UVCDAT env
-source @CMAKE_INSTALL_PREFIX@/bin/reset_runtime.csh
-
-# Now store existing env var that we will be tweaking
-foreach v ( PATH LD_LIBRARY_PATH DYLD_LIBRARY_PATH PYTHONPATH OPAL_PREFIX LIBOVERLAY_SCROLLBAR )
-    if ( `eval echo \$\?$v` ) then
-        set vv=`eval echo \$$v`
-        setenv UVCDAT_ORIGINAL_${v} ${vv}
-    else
-        setenv UVCDAT_ORIGINAL_${v} " "
-    endif
-end
-
-setenv UVCDAT_PROMPT_STRING @UVCDAT_PROMPT_STRING@
-if ( $?UVCDAT_PROMPT_BEGINNING ) then
-  setenv UVCDAT_ORIGINAL_prompt ${prompt}
-  set prompt = "[@UVCDAT_PROMPT_STRING@]${prompt}"
-else if ( $?UVCDAT_PROMPT_END ) then
-  setenv UVCDAT_ORIGINAL_prompt ${prompt}
-  set prompt = "${prompt}[@UVCDAT_PROMPT_STRING@]"
-endif
-
-# If unset, use the value configured by cmake by default.
-
-# Everything beyond this point will be determined relatively
-# from this path.
-if ( $?UVCDAT_INSTALL_PREFIX ) then
-  set install_prefix=${UVCDAT_INSTALL_PREFIX}
-else
-  set install_prefix=@CMAKE_INSTALL_PREFIX@
-endif
-
-# Try to prevent the user from sourcing twice,
-# which can lead to errors.
-if ( $?UVCDAT_SETUP_PATH ) then
-  if ( ${UVCDAT_SETUP_PATH} == ${install_prefix} ) then
-    echo 'Nothing to do since UVCDAT is already setup at '${UVCDAT_SETUP_PATH}
-    exit 0
-  else
-    echo 'ERROR: UVCDAT setup was previously sourced at '${UVCDAT_SETUP_PATH}
-    echo 'ERROR: There is no need to run setup_runtime manually anymore.'
-    echo 'ERROR: Open a new shell in order to use a different install location.'
-    echo 'ERROR: Or execute source @CMAKE_INSTALL_PREFIX@/bin/reset_runtime.csh.'
-    exit 1
-  endif
-endif
-
-# Check that the install prefix exists, otherwise stop.
-if ( ! -d ${install_prefix} ) then
-  echo 'ERROR: '${install_prefix}' is not a directory.'
-  exit 1
-endif
-
-if ( ! $?LD_LIBRARY_PATH ) then
-  setenv LD_LIBRARY_PATH ''
-endif
-
-if ( ! $?PYTHONPATH ) then
-  setenv PYTHONPATH ''
-endif
-
-if ( ! $?PATH ) then
-  setenv PATH ''
-endif
-
-if ( '@QT_LIB_DIR@' != '' ) then
-  if ( -d @QT_LIB_DIR@ ) then
-    setenv LD_LIBRARY_PATH @QT_LIB_DIR@:${LD_LIBRARY_PATH}
-  endif
-endif
-
-foreach d ( @SETUP_LIBRARY_PATHS@ )
-  set f=${install_prefix}/${d}
-  if ( -d ${f} ) then
-    setenv LD_LIBRARY_PATH ${f}:${LD_LIBRARY_PATH}
-  endif
-end
-
-if ( `uname` == 'Darwin' ) then
-  setenv LD_LIBRARY_PATH /usr/X11R6/lib:/usr/lib:${LD_LIBRARY_PATH}
-  setenv DYLD_FALLBACK_LIBRARY_PATH ${LD_LIBRARY_PATH}
-endif
-
-foreach d ( @SETUP_PYTHON_PATHS@ )
-  set f=${install_prefix}/${d}
-  if ( -d ${f} ) then
-    setenv PYTHONPATH ${f}:${PYTHONPATH}
-  endif
-end
-
-foreach d ( @SETUP_EXECUTABLE_PATHS@ )
-  set f=${install_prefix}/${d}
-  if ( -d ${f} ) then
-    setenv PATH ${f}:${PATH}
-  endif
-end
-
-if ( -d ${install_prefix}/Externals/lib/R ) then
-  setenv R_HOME ${install_prefix}/Externals/lib/R
-endif
-
-setenv GDAL_DATA ${install_prefix}/Externals/share/gdal
-setenv OPAL_PREFIX ${install_prefix}/Externals
-setenv LIBOVERLAY_SCROLLBAR 0
-
-setenv UVCDAT_SETUP_PATH ${install_prefix}
-
-unset install_prefix
-
-echo 'Successfully updated your environment to use UVCDAT'
-echo '(changes are valid for this session/terminal only)'
-echo 'Version: '${UVCDAT_PROMPT_STRING}
-echo 'Location: '${UVCDAT_SETUP_PATH}
-echo 'Reset these changes by running: source @CMAKE_INSTALL_PREFIX@/bin/reset_runtime.csh'
diff --git a/CMake/cdat_modules_extra/setup_runtime.sh.in b/CMake/cdat_modules_extra/setup_runtime.sh.in
deleted file mode 100755
index 0476b092bfe1877df18f0b79e3f3a0ce310841ea..0000000000000000000000000000000000000000
--- a/CMake/cdat_modules_extra/setup_runtime.sh.in
+++ /dev/null
@@ -1,111 +0,0 @@
-# Everything beyond this point will be determined relatively
-# from this path.
-install_prefix="@CMAKE_INSTALL_PREFIX@"
-# Reset previous uvcdat env messing up
-. @CMAKE_INSTALL_PREFIX@/bin/reset_runtime.sh
-
-# Saves what we will mess with
-for v in PATH LD_LIBRARY_PATH DYLD_LIBRARY_PATH PYTHONPATH OPAL_PREFIX LIBOVERLAY_SCROLLBAR ; do
-   tmp="${v}"
-    if [ -n "${!tmp}" ] ; then
-        export UVCDAT_ORIGINAL_${v}=${!v}
-    else
-        export UVCDAT_ORIGINAL_${v}=" "
-    fi
-done
-
-function cleanup {
-  unset cleanup install_prefix library_paths python_paths executable_paths
-}
-
-# Try to prevent the user from sourcing twice,
-# which can lead to errors.
-if [ -n "${UVCDAT_SETUP_PATH}" ] ; then
-  if [ "${UVCDAT_SETUP_PATH}" = "${install_prefix}" ] ; then
-    echo "Nothing to do since UVCDAT is already setup at: ${UVCDAT_SETUP_PATH}" 1>&2
-    cleanup
-    return 0
-  else
-    echo "ERROR: UVCDAT setup was previously sourced at: ${UVCDAT_SETUP_PATH}" 1>&2
-    echo "ERROR: There is no need to run setup_runtime manually anymore." 1>&2
-    echo "ERROR: Open a new shell in order to use a different install location." 1>&2
-    echo "ERROR: Or execute source @CMAKE_INSTALL_PREFIX@/bin/reset_runtime.sh." 1>&2
-    cleanup
-    return 1
-  fi
-fi
-
-# Check that the install prefix exists, otherwise stop.
-if [ ! -d "${install_prefix}" ] ; then
-  echo "ERROR: ${install_prefix} is not a directory." 1>&2
-  cleanup
-  return 1
-fi
-
-# cmake set variables
-library_paths=( @SETUP_LIBRARY_PATHS@ )
-python_paths=( @SETUP_PYTHON_PATHS@ )
-executable_paths=( @SETUP_EXECUTABLE_PATHS@ )
-
-export UVCDAT_PROMPT_STRING=@UVCDAT_PROMPT_STRING@
-if [ "$UVCDAT_ENABLE_PROMPT_BEGINNING" ] ; then
-    export UVCDAT_ORIGINAL_PS1=${PS1}" "
-    export PS1="[@UVCDAT_PROMPT_STRING@]$PS1"
-
-elif [ "$UVCDAT_ENABLE_PROMPT_END" ] ; then
-    export UVCDAT_ORIGINAL_PS1=${PS1}" "
-    export PS1="$PS1[@UVCDAT_PROMPT_STRING@]"
-fi
-
-if [ -d '@QT_LIB_DIR@' ] ; then
-  LD_LIBRARY_PATH='@QT_LIB_DIR@:'"${LD_LIBRARY_PATH}"
-fi
-
-for d in "${library_paths[@]}" ; do
-  f="${install_prefix}/${d}"
-  if [ -d "${f}" ] ; then
-    LD_LIBRARY_PATH="${f}:${LD_LIBRARY_PATH}"
-  fi
-done
-
-if [ `uname` = 'Darwin' ] ; then
-  LD_LIBRARY_PATH="/usr/X11R6/lib:/usr/lib:${LD_LIBRARY_PATH}"
-  export DYLD_FALLBACK_LIBRARY_PATH="${LD_LIBRARY_PATH}"
-fi
-
-for d in "${python_paths[@]}" ; do
-  f="${install_prefix}/${d}"
-  if [ -d "${f}" ] ; then
-    PYTHONPATH="${f}:${PYTHONPATH}"
-  fi
-  unset f
-done
-
-for d in "${executable_paths[@]}" ; do
-  f="${install_prefix}/${d}"
-  if [ -d "${f}" ] ; then
-    PATH="${f}:${PATH}"
-  fi
-  unset f
-done
-
-if [ -d "${install_prefix}/Externals/lib/R" ] ; then
-  export R_HOME="${install_prefix}/Externals/lib/R"
-fi
-
-export GDAL_DATA="${install_prefix}/Externals/share/gdal"
-export OPAL_PREFIX="${install_prefix}/Externals"
-export LIBOVERLAY_SCROLLBAR=0
-
-export PATH
-export LD_LIBRARY_PATH
-export PYTHONPATH
-
-export UVCDAT_SETUP_PATH="${install_prefix}"
-cleanup
-echo "Successfully updated your environment to use UVCDAT" 1>&2
-echo "(changes are valid for this session/terminal only)" 1>&2
-echo "Version: ${UVCDAT_PROMPT_STRING}" 1>&2
-echo "Location: ${UVCDAT_SETUP_PATH}" 1>&2
-echo "Reset these changes by running: source @CMAKE_INSTALL_PREFIX@/bin/reset_runtime.sh" 1>&2
-return 0
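
Most of setup_runtime boils down to a single operation: prepend a directory to a colon-separated search variable, but only if the directory exists, so that the freshly installed tree wins on lookup. A compact sketch of that operation (the site-packages path is hypothetical):

```python
import os

def prepend_existing(var, candidates):
    # Keep only directories that actually exist; earlier entries win on lookup.
    dirs = [d for d in candidates if os.path.isdir(d)]
    old = os.environ.get(var, "")
    parts = dirs + ([old] if old else [])
    os.environ[var] = ":".join(parts)

prepend_existing("PYTHONPATH",
                 ["/usr/local/uvcdat/lib/python2.7/site-packages"])
```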
diff --git a/CMake/cdat_modules_extra/setuptools_install_step.cmake.in b/CMake/cdat_modules_extra/setuptools_install_step.cmake.in
deleted file mode 100644
index 0e5f477c546b1021743f97f01d2df4d55b3e33d0..0000000000000000000000000000000000000000
--- a/CMake/cdat_modules_extra/setuptools_install_step.cmake.in
+++ /dev/null
@@ -1,13 +0,0 @@
-
-include(@cdat_CMAKE_BINARY_DIR@/cdat_common_environment.cmake)
-
-execute_process(
-  COMMAND env PYTHONPATH=@PYTHONPATH@ LD_LIBRARY_PATH=@LD_LIBRARY_PATH@ "@PYTHON_EXECUTABLE@" setup.py install @PYTHON_EXTRA_PREFIX@
-  WORKING_DIRECTORY @setuptools_source@
-  RESULT_VARIABLE res
-  OUTPUT_VARIABLE CDAT_OUT
-  ERROR_VARIABLE CDAT_ERR)
-
-if(NOT ${res} EQUAL 0)
-  message("Install errors detected: \n${CDAT_OUT}\n${CDAT_ERR}")
-  message(FATAL_ERROR "Error in install")
-endif()
-message("Install succeeded.")
diff --git a/CMake/cdat_modules_extra/setuptools_make_step.cmake.in b/CMake/cdat_modules_extra/setuptools_make_step.cmake.in
deleted file mode 100644
index 7ddaec6a0c4b19c75c7b5101105a8275e315edff..0000000000000000000000000000000000000000
--- a/CMake/cdat_modules_extra/setuptools_make_step.cmake.in
+++ /dev/null
@@ -1,13 +0,0 @@
-
-include(@cdat_CMAKE_BINARY_DIR@/cdat_common_environment.cmake)
-
-execute_process(
-  COMMAND env PYTHONPATH=@PYTHONPATH@ LD_LIBRARY_PATH=@LD_LIBRARY_PATH@ "@PYTHON_EXECUTABLE@" setup.py build
-  WORKING_DIRECTORY @setuptools_source@
-  RESULT_VARIABLE res
-  OUTPUT_VARIABLE CDAT_OUT
-  ERROR_VARIABLE CDAT_ERR)
-
-if(NOT ${res} EQUAL 0)
-  message("Build errors detected: \n${CDAT_OUT}\n${CDAT_ERR}")
-  message(FATAL_ERROR "Error in build")
-endif()
-message("Build succeeded.")
diff --git a/CMake/cdat_modules_extra/site.cfg.in b/CMake/cdat_modules_extra/site.cfg.in
deleted file mode 100644
index 1a250deb70f2ea6f4f0e3f45d207a871c6c04a5f..0000000000000000000000000000000000000000
--- a/CMake/cdat_modules_extra/site.cfg.in
+++ /dev/null
@@ -1,4 +0,0 @@
-[DEFAULT]
-library_dirs = @EXTERNALS@/lib
-include_dirs = @EXTERNALS@/include
-
diff --git a/CMake/cdat_modules_extra/udunits2_apple_configure.in b/CMake/cdat_modules_extra/udunits2_apple_configure.in
deleted file mode 100755
index 5bb7d2828c4e3606cf20561924c4196d05f0c9ba..0000000000000000000000000000000000000000
--- a/CMake/cdat_modules_extra/udunits2_apple_configure.in
+++ /dev/null
@@ -1,18006 +0,0 @@
-#! /bin/sh
-# Guess values for system-dependent variables and create Makefiles.
-# Generated by GNU Autoconf 2.68 for UDUNITS 2.2.17.
-#
-# Report bugs to <support-udunits@unidata.ucar.edu>.
-#
-#
-# Copyright (C) 1992, 1993, 1994, 1995, 1996, 1998, 1999, 2000, 2001,
-# 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010 Free Software
-# Foundation, Inc.
-#
-#
-# This configure script is free software; the Free Software Foundation
-# gives unlimited permission to copy, distribute and modify it.
-## -------------------- ##
-## M4sh Initialization. ##
-## -------------------- ##
-
-# Be more Bourne compatible
-DUALCASE=1; export DUALCASE # for MKS sh
-if test -n "${ZSH_VERSION+set}" && (emulate sh) >/dev/null 2>&1; then :
-  emulate sh
-  NULLCMD=:
-  # Pre-4.2 versions of Zsh do word splitting on ${1+"$@"}, which
-  # is contrary to our usage.  Disable this feature.
-  alias -g '${1+"$@"}'='"$@"'
-  setopt NO_GLOB_SUBST
-else
-  case `(set -o) 2>/dev/null` in #(
-  *posix*) :
-    set -o posix ;; #(
-  *) :
-     ;;
-esac
-fi
-
-
-as_nl='
-'
-export as_nl
-# Printing a long string crashes Solaris 7 /usr/bin/printf.
-as_echo='\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\'
-as_echo=$as_echo$as_echo$as_echo$as_echo$as_echo
-as_echo=$as_echo$as_echo$as_echo$as_echo$as_echo$as_echo
-# Prefer a ksh shell builtin over an external printf program on Solaris,
-# but without wasting forks for bash or zsh.
-if test -z "$BASH_VERSION$ZSH_VERSION" \
-    && (test "X`print -r -- $as_echo`" = "X$as_echo") 2>/dev/null; then
-  as_echo='print -r --'
-  as_echo_n='print -rn --'
-elif (test "X`printf %s $as_echo`" = "X$as_echo") 2>/dev/null; then
-  as_echo='printf %s\n'
-  as_echo_n='printf %s'
-else
-  if test "X`(/usr/ucb/echo -n -n $as_echo) 2>/dev/null`" = "X-n $as_echo"; then
-    as_echo_body='eval /usr/ucb/echo -n "$1$as_nl"'
-    as_echo_n='/usr/ucb/echo -n'
-  else
-    as_echo_body='eval expr "X$1" : "X\\(.*\\)"'
-    as_echo_n_body='eval
-      arg=$1;
-      case $arg in #(
-      *"$as_nl"*)
-	expr "X$arg" : "X\\(.*\\)$as_nl";
-	arg=`expr "X$arg" : ".*$as_nl\\(.*\\)"`;;
-      esac;
-      expr "X$arg" : "X\\(.*\\)" | tr -d "$as_nl"
-    '
-    export as_echo_n_body
-    as_echo_n='sh -c $as_echo_n_body as_echo'
-  fi
-  export as_echo_body
-  as_echo='sh -c $as_echo_body as_echo'
-fi
-
-# The user is always right.
-if test "${PATH_SEPARATOR+set}" != set; then
-  PATH_SEPARATOR=:
-  (PATH='/bin;/bin'; FPATH=$PATH; sh -c :) >/dev/null 2>&1 && {
-    (PATH='/bin:/bin'; FPATH=$PATH; sh -c :) >/dev/null 2>&1 ||
-      PATH_SEPARATOR=';'
-  }
-fi
-
-
-# IFS
-# We need space, tab and new line, in precisely that order.  Quoting is
-# there to prevent editors from complaining about space-tab.
-# (If _AS_PATH_WALK were called with IFS unset, it would disable word
-# splitting by setting IFS to empty value.)
-IFS=" ""	$as_nl"
-
-# Find who we are.  Look in the path if we contain no directory separator.
-as_myself=
-case $0 in #((
-  *[\\/]* ) as_myself=$0 ;;
-  *) as_save_IFS=$IFS; IFS=$PATH_SEPARATOR
-for as_dir in $PATH
-do
-  IFS=$as_save_IFS
-  test -z "$as_dir" && as_dir=.
-    test -r "$as_dir/$0" && as_myself=$as_dir/$0 && break
-  done
-IFS=$as_save_IFS
-
-     ;;
-esac
-# We did not find ourselves, most probably we were run as `sh COMMAND'
-# in which case we are not to be found in the path.
-if test "x$as_myself" = x; then
-  as_myself=$0
-fi
-if test ! -f "$as_myself"; then
-  $as_echo "$as_myself: error: cannot find myself; rerun with an absolute file name" >&2
-  exit 1
-fi
-
-# Unset variables that we do not need and which cause bugs (e.g. in
-# pre-3.0 UWIN ksh).  But do not cause bugs in bash 2.01; the "|| exit 1"
-# suppresses any "Segmentation fault" message there.  '((' could
-# trigger a bug in pdksh 5.2.14.
-for as_var in BASH_ENV ENV MAIL MAILPATH
-do eval test x\${$as_var+set} = xset \
-  && ( (unset $as_var) || exit 1) >/dev/null 2>&1 && unset $as_var || :
-done
-PS1='$ '
-PS2='> '
-PS4='+ '
-
-# NLS nuisances.
-LC_ALL=C
-export LC_ALL
-LANGUAGE=C
-export LANGUAGE
-
-# CDPATH.
-(unset CDPATH) >/dev/null 2>&1 && unset CDPATH
-
-if test "x$CONFIG_SHELL" = x; then
-  as_bourne_compatible="if test -n \"\${ZSH_VERSION+set}\" && (emulate sh) >/dev/null 2>&1; then :
-  emulate sh
-  NULLCMD=:
-  # Pre-4.2 versions of Zsh do word splitting on \${1+\"\$@\"}, which
-  # is contrary to our usage.  Disable this feature.
-  alias -g '\${1+\"\$@\"}'='\"\$@\"'
-  setopt NO_GLOB_SUBST
-else
-  case \`(set -o) 2>/dev/null\` in #(
-  *posix*) :
-    set -o posix ;; #(
-  *) :
-     ;;
-esac
-fi
-"
-  as_required="as_fn_return () { (exit \$1); }
-as_fn_success () { as_fn_return 0; }
-as_fn_failure () { as_fn_return 1; }
-as_fn_ret_success () { return 0; }
-as_fn_ret_failure () { return 1; }
-
-exitcode=0
-as_fn_success || { exitcode=1; echo as_fn_success failed.; }
-as_fn_failure && { exitcode=1; echo as_fn_failure succeeded.; }
-as_fn_ret_success || { exitcode=1; echo as_fn_ret_success failed.; }
-as_fn_ret_failure && { exitcode=1; echo as_fn_ret_failure succeeded.; }
-if ( set x; as_fn_ret_success y && test x = \"\$1\" ); then :
-
-else
-  exitcode=1; echo positional parameters were not saved.
-fi
-test x\$exitcode = x0 || exit 1"
-  as_suggested="  as_lineno_1=";as_suggested=$as_suggested$LINENO;as_suggested=$as_suggested" as_lineno_1a=\$LINENO
-  as_lineno_2=";as_suggested=$as_suggested$LINENO;as_suggested=$as_suggested" as_lineno_2a=\$LINENO
-  eval 'test \"x\$as_lineno_1'\$as_run'\" != \"x\$as_lineno_2'\$as_run'\" &&
-  test \"x\`expr \$as_lineno_1'\$as_run' + 1\`\" = \"x\$as_lineno_2'\$as_run'\"' || exit 1
-test \$(( 1 + 1 )) = 2 || exit 1
-
-  test -n \"\${ZSH_VERSION+set}\${BASH_VERSION+set}\" || (
-    ECHO='\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\'
-    ECHO=\$ECHO\$ECHO\$ECHO\$ECHO\$ECHO
-    ECHO=\$ECHO\$ECHO\$ECHO\$ECHO\$ECHO\$ECHO
-    PATH=/empty FPATH=/empty; export PATH FPATH
-    test \"X\`printf %s \$ECHO\`\" = \"X\$ECHO\" \\
-      || test \"X\`print -r -- \$ECHO\`\" = \"X\$ECHO\" ) || exit 1"
-  if (eval "$as_required") 2>/dev/null; then :
-  as_have_required=yes
-else
-  as_have_required=no
-fi
-  if test x$as_have_required = xyes && (eval "$as_suggested") 2>/dev/null; then :
-
-else
-  as_save_IFS=$IFS; IFS=$PATH_SEPARATOR
-as_found=false
-for as_dir in /bin$PATH_SEPARATOR/usr/bin$PATH_SEPARATOR$PATH
-do
-  IFS=$as_save_IFS
-  test -z "$as_dir" && as_dir=.
-  as_found=:
-  case $as_dir in #(
-	 /*)
-	   for as_base in sh bash ksh sh5; do
-	     # Try only shells that exist, to save several forks.
-	     as_shell=$as_dir/$as_base
-	     if { test -f "$as_shell" || test -f "$as_shell.exe"; } &&
-		    { $as_echo "$as_bourne_compatible""$as_required" | as_run=a "$as_shell"; } 2>/dev/null; then :
-  CONFIG_SHELL=$as_shell as_have_required=yes
-		   if { $as_echo "$as_bourne_compatible""$as_suggested" | as_run=a "$as_shell"; } 2>/dev/null; then :
-  break 2
-fi
-fi
-	   done;;
-       esac
-  as_found=false
-done
-$as_found || { if { test -f "$SHELL" || test -f "$SHELL.exe"; } &&
-	      { $as_echo "$as_bourne_compatible""$as_required" | as_run=a "$SHELL"; } 2>/dev/null; then :
-  CONFIG_SHELL=$SHELL as_have_required=yes
-fi; }
-IFS=$as_save_IFS
-
-
-      if test "x$CONFIG_SHELL" != x; then :
-  # We cannot yet assume a decent shell, so we have to provide a
-	# neutralization value for shells without unset; and this also
-	# works around shells that cannot unset nonexistent variables.
-	# Preserve -v and -x to the replacement shell.
-	BASH_ENV=/dev/null
-	ENV=/dev/null
-	(unset BASH_ENV) >/dev/null 2>&1 && unset BASH_ENV ENV
-	export CONFIG_SHELL
-	case $- in # ((((
-	  *v*x* | *x*v* ) as_opts=-vx ;;
-	  *v* ) as_opts=-v ;;
-	  *x* ) as_opts=-x ;;
-	  * ) as_opts= ;;
-	esac
-	exec "$CONFIG_SHELL" $as_opts "$as_myself" ${1+"$@"}
-fi
-
-    if test x$as_have_required = xno; then :
-  $as_echo "$0: This script requires a shell more modern than all"
-  $as_echo "$0: the shells that I found on your system."
-  if test x${ZSH_VERSION+set} = xset ; then
-    $as_echo "$0: In particular, zsh $ZSH_VERSION has bugs and should"
-    $as_echo "$0: be upgraded to zsh 4.3.4 or later."
-  else
-    $as_echo "$0: Please tell bug-autoconf@gnu.org and
-$0: support-udunits@unidata.ucar.edu about your system,
-$0: including any error possibly output before this
-$0: message. Then install a modern shell, or manually run
-$0: the script under such a shell if you do have one."
-  fi
-  exit 1
-fi
-fi
-fi
-SHELL=${CONFIG_SHELL-/bin/sh}
-export SHELL
-# Unset more variables known to interfere with behavior of common tools.
-CLICOLOR_FORCE= GREP_OPTIONS=
-unset CLICOLOR_FORCE GREP_OPTIONS
-
-## --------------------- ##
-## M4sh Shell Functions. ##
-## --------------------- ##
-# as_fn_unset VAR
-# ---------------
-# Portably unset VAR.
-as_fn_unset ()
-{
-  { eval $1=; unset $1;}
-}
-as_unset=as_fn_unset
-
-# as_fn_set_status STATUS
-# -----------------------
-# Set $? to STATUS, without forking.
-as_fn_set_status ()
-{
-  return $1
-} # as_fn_set_status
-
-# as_fn_exit STATUS
-# -----------------
-# Exit the shell with STATUS, even in a "trap 0" or "set -e" context.
-as_fn_exit ()
-{
-  set +e
-  as_fn_set_status $1
-  exit $1
-} # as_fn_exit
-
-# as_fn_mkdir_p
-# -------------
-# Create "$as_dir" as a directory, including parents if necessary.
-as_fn_mkdir_p ()
-{
-
-  case $as_dir in #(
-  -*) as_dir=./$as_dir;;
-  esac
-  test -d "$as_dir" || eval $as_mkdir_p || {
-    as_dirs=
-    while :; do
-      case $as_dir in #(
-      *\'*) as_qdir=`$as_echo "$as_dir" | sed "s/'/'\\\\\\\\''/g"`;; #'(
-      *) as_qdir=$as_dir;;
-      esac
-      as_dirs="'$as_qdir' $as_dirs"
-      as_dir=`$as_dirname -- "$as_dir" ||
-$as_expr X"$as_dir" : 'X\(.*[^/]\)//*[^/][^/]*/*$' \| \
-	 X"$as_dir" : 'X\(//\)[^/]' \| \
-	 X"$as_dir" : 'X\(//\)$' \| \
-	 X"$as_dir" : 'X\(/\)' \| . 2>/dev/null ||
-$as_echo X"$as_dir" |
-    sed '/^X\(.*[^/]\)\/\/*[^/][^/]*\/*$/{
-	    s//\1/
-	    q
-	  }
-	  /^X\(\/\/\)[^/].*/{
-	    s//\1/
-	    q
-	  }
-	  /^X\(\/\/\)$/{
-	    s//\1/
-	    q
-	  }
-	  /^X\(\/\).*/{
-	    s//\1/
-	    q
-	  }
-	  s/.*/./; q'`
-      test -d "$as_dir" && break
-    done
-    test -z "$as_dirs" || eval "mkdir $as_dirs"
-  } || test -d "$as_dir" || as_fn_error $? "cannot create directory $as_dir"
-
-
-} # as_fn_mkdir_p
-# as_fn_append VAR VALUE
-# ----------------------
-# Append the text in VALUE to the end of the definition contained in VAR. Take
-# advantage of any shell optimizations that allow amortized linear growth over
-# repeated appends, instead of the typical quadratic growth present in naive
-# implementations.
-if (eval "as_var=1; as_var+=2; test x\$as_var = x12") 2>/dev/null; then :
-  eval 'as_fn_append ()
-  {
-    eval $1+=\$2
-  }'
-else
-  as_fn_append ()
-  {
-    eval $1=\$$1\$2
-  }
-fi # as_fn_append
-
-# as_fn_arith ARG...
-# ------------------
-# Perform arithmetic evaluation on the ARGs, and store the result in the
-# global $as_val. Take advantage of shells that can avoid forks. The arguments
-# must be portable across $(()) and expr.
-if (eval "test \$(( 1 + 1 )) = 2") 2>/dev/null; then :
-  eval 'as_fn_arith ()
-  {
-    as_val=$(( $* ))
-  }'
-else
-  as_fn_arith ()
-  {
-    as_val=`expr "$@" || test $? -eq 1`
-  }
-fi # as_fn_arith
-
-
-# as_fn_error STATUS ERROR [LINENO LOG_FD]
-# ----------------------------------------
-# Output "`basename $0`: error: ERROR" to stderr. If LINENO and LOG_FD are
-# provided, also output the error to LOG_FD, referencing LINENO. Then exit the
-# script with STATUS, using 1 if that was 0.
-as_fn_error ()
-{
-  as_status=$1; test $as_status -eq 0 && as_status=1
-  if test "$4"; then
-    as_lineno=${as_lineno-"$3"} as_lineno_stack=as_lineno_stack=$as_lineno_stack
-    $as_echo "$as_me:${as_lineno-$LINENO}: error: $2" >&$4
-  fi
-  $as_echo "$as_me: error: $2" >&2
-  as_fn_exit $as_status
-} # as_fn_error
-
-if expr a : '\(a\)' >/dev/null 2>&1 &&
-   test "X`expr 00001 : '.*\(...\)'`" = X001; then
-  as_expr=expr
-else
-  as_expr=false
-fi
-
-if (basename -- /) >/dev/null 2>&1 && test "X`basename -- / 2>&1`" = "X/"; then
-  as_basename=basename
-else
-  as_basename=false
-fi
-
-if (as_dir=`dirname -- /` && test "X$as_dir" = X/) >/dev/null 2>&1; then
-  as_dirname=dirname
-else
-  as_dirname=false
-fi
-
-as_me=`$as_basename -- "$0" ||
-$as_expr X/"$0" : '.*/\([^/][^/]*\)/*$' \| \
-	 X"$0" : 'X\(//\)$' \| \
-	 X"$0" : 'X\(/\)' \| . 2>/dev/null ||
-$as_echo X/"$0" |
-    sed '/^.*\/\([^/][^/]*\)\/*$/{
-	    s//\1/
-	    q
-	  }
-	  /^X\/\(\/\/\)$/{
-	    s//\1/
-	    q
-	  }
-	  /^X\/\(\/\).*/{
-	    s//\1/
-	    q
-	  }
-	  s/.*/./; q'`
-
-# Avoid depending upon Character Ranges.
-as_cr_letters='abcdefghijklmnopqrstuvwxyz'
-as_cr_LETTERS='ABCDEFGHIJKLMNOPQRSTUVWXYZ'
-as_cr_Letters=$as_cr_letters$as_cr_LETTERS
-as_cr_digits='0123456789'
-as_cr_alnum=$as_cr_Letters$as_cr_digits
-
-
-  as_lineno_1=$LINENO as_lineno_1a=$LINENO
-  as_lineno_2=$LINENO as_lineno_2a=$LINENO
-  eval 'test "x$as_lineno_1'$as_run'" != "x$as_lineno_2'$as_run'" &&
-  test "x`expr $as_lineno_1'$as_run' + 1`" = "x$as_lineno_2'$as_run'"' || {
-  # Blame Lee E. McMahon (1931-1989) for sed's syntax.  :-)
-  sed -n '
-    p
-    /[$]LINENO/=
-  ' <$as_myself |
-    sed '
-      s/[$]LINENO.*/&-/
-      t lineno
-      b
-      :lineno
-      N
-      :loop
-      s/[$]LINENO\([^'$as_cr_alnum'_].*\n\)\(.*\)/\2\1\2/
-      t loop
-      s/-\n.*//
-    ' >$as_me.lineno &&
-  chmod +x "$as_me.lineno" ||
-    { $as_echo "$as_me: error: cannot create $as_me.lineno; rerun with a POSIX shell" >&2; as_fn_exit 1; }
-
-  # Don't try to exec as it changes $[0], causing all sort of problems
-  # (the dirname of $[0] is not the place where we might find the
-  # original and so on.  Autoconf is especially sensitive to this).
-  . "./$as_me.lineno"
-  # Exit status is that of the last command.
-  exit
-}
-
-ECHO_C= ECHO_N= ECHO_T=
-case `echo -n x` in #(((((
--n*)
-  case `echo 'xy\c'` in
-  *c*) ECHO_T='	';;	# ECHO_T is single tab character.
-  xy)  ECHO_C='\c';;
-  *)   echo `echo ksh88 bug on AIX 6.1` > /dev/null
-       ECHO_T='	';;
-  esac;;
-*)
-  ECHO_N='-n';;
-esac
-
-rm -f conf$$ conf$$.exe conf$$.file
-if test -d conf$$.dir; then
-  rm -f conf$$.dir/conf$$.file
-else
-  rm -f conf$$.dir
-  mkdir conf$$.dir 2>/dev/null
-fi
-if (echo >conf$$.file) 2>/dev/null; then
-  if ln -s conf$$.file conf$$ 2>/dev/null; then
-    as_ln_s='ln -s'
-    # ... but there are two gotchas:
-    # 1) On MSYS, both `ln -s file dir' and `ln file dir' fail.
-    # 2) DJGPP < 2.04 has no symlinks; `ln -s' creates a wrapper executable.
-    # In both cases, we have to default to `cp -p'.
-    ln -s conf$$.file conf$$.dir 2>/dev/null && test ! -f conf$$.exe ||
-      as_ln_s='cp -p'
-  elif ln conf$$.file conf$$ 2>/dev/null; then
-    as_ln_s=ln
-  else
-    as_ln_s='cp -p'
-  fi
-else
-  as_ln_s='cp -p'
-fi
-rm -f conf$$ conf$$.exe conf$$.dir/conf$$.file conf$$.file
-rmdir conf$$.dir 2>/dev/null
-
-if mkdir -p . 2>/dev/null; then
-  as_mkdir_p='mkdir -p "$as_dir"'
-else
-  test -d ./-p && rmdir ./-p
-  as_mkdir_p=false
-fi
-
-if test -x / >/dev/null 2>&1; then
-  as_test_x='test -x'
-else
-  if ls -dL / >/dev/null 2>&1; then
-    as_ls_L_option=L
-  else
-    as_ls_L_option=
-  fi
-  as_test_x='
-    eval sh -c '\''
-      if test -d "$1"; then
-	test -d "$1/.";
-      else
-	case $1 in #(
-	-*)set "./$1";;
-	esac;
-	case `ls -ld'$as_ls_L_option' "$1" 2>/dev/null` in #((
-	???[sx]*):;;*)false;;esac;fi
-    '\'' sh
-  '
-fi
-as_executable_p=$as_test_x
-
-# Sed expression to map a string onto a valid CPP name.
-as_tr_cpp="eval sed 'y%*$as_cr_letters%P$as_cr_LETTERS%;s%[^_$as_cr_alnum]%_%g'"
-
-# Sed expression to map a string onto a valid variable name.
-as_tr_sh="eval sed 'y%*+%pp%;s%[^_$as_cr_alnum]%_%g'"
-
-SHELL=${CONFIG_SHELL-/bin/sh}
-
-
-test -n "$DJDIR" || exec 7<&0 </dev/null
-exec 6>&1
-
-# Name of the host.
-# hostname on some systems (SVR3.2, old GNU/Linux) returns a bogus exit status,
-# so uname gets run too.
-ac_hostname=`(hostname || uname -n) 2>/dev/null | sed 1q`
-
-#
-# Initializations.
-#
-ac_default_prefix=/usr/local
-ac_clean_files=
-ac_config_libobj_dir=.
-LIBOBJS=
-cross_compiling=no
-subdirs=
-MFLAGS=
-MAKEFLAGS=
-
-# Identity of this package.
-PACKAGE_NAME='UDUNITS'
-PACKAGE_TARNAME='udunits'
-PACKAGE_VERSION='2.2.17'
-PACKAGE_STRING='UDUNITS 2.2.17'
-PACKAGE_BUGREPORT='support-udunits@unidata.ucar.edu'
-PACKAGE_URL=''
-
-ac_unique_file="lib/converter.c"
-# Factoring default headers for most tests.
-ac_includes_default="\
-#include <stdio.h>
-#ifdef HAVE_SYS_TYPES_H
-# include <sys/types.h>
-#endif
-#ifdef HAVE_SYS_STAT_H
-# include <sys/stat.h>
-#endif
-#ifdef STDC_HEADERS
-# include <stdlib.h>
-# include <stddef.h>
-#else
-# ifdef HAVE_STDLIB_H
-#  include <stdlib.h>
-# endif
-#endif
-#ifdef HAVE_STRING_H
-# if !defined STDC_HEADERS && defined HAVE_MEMORY_H
-#  include <memory.h>
-# endif
-# include <string.h>
-#endif
-#ifdef HAVE_STRINGS_H
-# include <strings.h>
-#endif
-#ifdef HAVE_INTTYPES_H
-# include <inttypes.h>
-#endif
-#ifdef HAVE_STDINT_H
-# include <stdint.h>
-#endif
-#ifdef HAVE_UNISTD_H
-# include <unistd.h>
-#endif"
-
-ac_subst_vars='am__EXEEXT_FALSE
-am__EXEEXT_TRUE
-LTLIBOBJS
-LIBOBJS
-OTOOL64
-OTOOL
-LIPO
-NMEDIT
-DSYMUTIL
-MANIFEST_TOOL
-RANLIB
-ac_ct_AR
-AR
-DLLTOOL
-OBJDUMP
-NM
-ac_ct_DUMPBIN
-DUMPBIN
-LD
-FGREP
-SED
-host_os
-host_vendor
-host_cpu
-host
-build_os
-build_vendor
-build_cpu
-build
-LIBTOOL
-HAVE_CUNIT_FALSE
-HAVE_CUNIT_TRUE
-LD_CUNIT
-EGREP
-GREP
-ac_ct_FC
-FCFLAGS
-FC
-LEXLIB
-LEX_OUTPUT_ROOT
-LEX
-YFLAGS
-YACC
-LN_S
-CPP
-am__fastdepCC_FALSE
-am__fastdepCC_TRUE
-CCDEPMODE
-AMDEPBACKSLASH
-AMDEP_FALSE
-AMDEP_TRUE
-am__quote
-am__include
-DEPDIR
-OBJEXT
-EXEEXT
-ac_ct_CC
-CPPFLAGS
-LDFLAGS
-CFLAGS
-CC
-ENABLE_UDUNITS_1_FALSE
-ENABLE_UDUNITS_1_TRUE
-DEBUG_FALSE
-DEBUG_TRUE
-LIBS_COVERAGE
-CFLAGS_COVERAGE
-am__untar
-am__tar
-AMTAR
-am__leading_dot
-SET_MAKE
-AWK
-mkdir_p
-MKDIR_P
-INSTALL_STRIP_PROGRAM
-STRIP
-install_sh
-MAKEINFO
-AUTOHEADER
-AUTOMAKE
-AUTOCONF
-ACLOCAL
-VERSION
-PACKAGE
-CYGPATH_W
-am__isrc
-INSTALL_DATA
-INSTALL_SCRIPT
-INSTALL_PROGRAM
-target_alias
-host_alias
-build_alias
-LIBS
-ECHO_T
-ECHO_N
-ECHO_C
-DEFS
-mandir
-localedir
-libdir
-psdir
-pdfdir
-dvidir
-htmldir
-infodir
-docdir
-oldincludedir
-includedir
-localstatedir
-sharedstatedir
-sysconfdir
-datadir
-datarootdir
-libexecdir
-sbindir
-bindir
-program_transform_name
-prefix
-exec_prefix
-PACKAGE_URL
-PACKAGE_BUGREPORT
-PACKAGE_STRING
-PACKAGE_VERSION
-PACKAGE_TARNAME
-PACKAGE_NAME
-PATH_SEPARATOR
-SHELL'
-ac_subst_files=''
-ac_user_opts='
-enable_option_checking
-enable_coverage
-enable_debug
-enable_udunits_1
-enable_dependency_tracking
-enable_shared
-enable_static
-with_pic
-enable_fast_install
-with_gnu_ld
-with_sysroot
-enable_libtool_lock
-'
-      ac_precious_vars='build_alias
-host_alias
-target_alias
-CC
-CFLAGS
-LDFLAGS
-LIBS
-CPPFLAGS
-CPP
-YACC
-YFLAGS
-FC
-FCFLAGS'
-
-
-# Initialize some variables set by options.
-ac_init_help=
-ac_init_version=false
-ac_unrecognized_opts=
-ac_unrecognized_sep=
-# The variables have the same names as the options, with
-# dashes changed to underlines.
-cache_file=/dev/null
-exec_prefix=NONE
-no_create=
-no_recursion=
-prefix=NONE
-program_prefix=NONE
-program_suffix=NONE
-program_transform_name=s,x,x,
-silent=
-site=
-srcdir=
-verbose=
-x_includes=NONE
-x_libraries=NONE
-
-# Installation directory options.
-# These are left unexpanded so users can "make install exec_prefix=/foo"
-# and all the variables that are supposed to be based on exec_prefix
-# by default will actually change.
-# Use braces instead of parens because sh, perl, etc. also accept them.
-# (The list follows the same order as the GNU Coding Standards.)
-bindir='${exec_prefix}/bin'
-sbindir='${exec_prefix}/sbin'
-libexecdir='${exec_prefix}/libexec'
-datarootdir='${prefix}/share'
-datadir='${datarootdir}'
-sysconfdir='${prefix}/etc'
-sharedstatedir='${prefix}/com'
-localstatedir='${prefix}/var'
-includedir='${prefix}/include'
-oldincludedir='/usr/include'
-docdir='${datarootdir}/doc/${PACKAGE_TARNAME}'
-infodir='${datarootdir}/info'
-htmldir='${docdir}'
-dvidir='${docdir}'
-pdfdir='${docdir}'
-psdir='${docdir}'
-libdir='${exec_prefix}/lib'
-localedir='${datarootdir}/locale'
-mandir='${datarootdir}/man'
-
-ac_prev=
-ac_dashdash=
-for ac_option
-do
-  # If the previous option needs an argument, assign it.
-  if test -n "$ac_prev"; then
-    eval $ac_prev=\$ac_option
-    ac_prev=
-    continue
-  fi
-
-  case $ac_option in
-  *=?*) ac_optarg=`expr "X$ac_option" : '[^=]*=\(.*\)'` ;;
-  *=)   ac_optarg= ;;
-  *)    ac_optarg=yes ;;
-  esac
-
-  # Accept the important Cygnus configure options, so we can diagnose typos.
-
-  case $ac_dashdash$ac_option in
-  --)
-    ac_dashdash=yes ;;
-
-  -bindir | --bindir | --bindi | --bind | --bin | --bi)
-    ac_prev=bindir ;;
-  -bindir=* | --bindir=* | --bindi=* | --bind=* | --bin=* | --bi=*)
-    bindir=$ac_optarg ;;
-
-  -build | --build | --buil | --bui | --bu)
-    ac_prev=build_alias ;;
-  -build=* | --build=* | --buil=* | --bui=* | --bu=*)
-    build_alias=$ac_optarg ;;
-
-  -cache-file | --cache-file | --cache-fil | --cache-fi \
-  | --cache-f | --cache- | --cache | --cach | --cac | --ca | --c)
-    ac_prev=cache_file ;;
-  -cache-file=* | --cache-file=* | --cache-fil=* | --cache-fi=* \
-  | --cache-f=* | --cache-=* | --cache=* | --cach=* | --cac=* | --ca=* | --c=*)
-    cache_file=$ac_optarg ;;
-
-  --config-cache | -C)
-    cache_file=config.cache ;;
-
-  -datadir | --datadir | --datadi | --datad)
-    ac_prev=datadir ;;
-  -datadir=* | --datadir=* | --datadi=* | --datad=*)
-    datadir=$ac_optarg ;;
-
-  -datarootdir | --datarootdir | --datarootdi | --datarootd | --dataroot \
-  | --dataroo | --dataro | --datar)
-    ac_prev=datarootdir ;;
-  -datarootdir=* | --datarootdir=* | --datarootdi=* | --datarootd=* \
-  | --dataroot=* | --dataroo=* | --dataro=* | --datar=*)
-    datarootdir=$ac_optarg ;;
-
-  -disable-* | --disable-*)
-    ac_useropt=`expr "x$ac_option" : 'x-*disable-\(.*\)'`
-    # Reject names that are not valid shell variable names.
-    expr "x$ac_useropt" : ".*[^-+._$as_cr_alnum]" >/dev/null &&
-      as_fn_error $? "invalid feature name: $ac_useropt"
-    ac_useropt_orig=$ac_useropt
-    ac_useropt=`$as_echo "$ac_useropt" | sed 's/[-+.]/_/g'`
-    case $ac_user_opts in
-      *"
-"enable_$ac_useropt"
-"*) ;;
-      *) ac_unrecognized_opts="$ac_unrecognized_opts$ac_unrecognized_sep--disable-$ac_useropt_orig"
-	 ac_unrecognized_sep=', ';;
-    esac
-    eval enable_$ac_useropt=no ;;
-
-  -docdir | --docdir | --docdi | --doc | --do)
-    ac_prev=docdir ;;
-  -docdir=* | --docdir=* | --docdi=* | --doc=* | --do=*)
-    docdir=$ac_optarg ;;
-
-  -dvidir | --dvidir | --dvidi | --dvid | --dvi | --dv)
-    ac_prev=dvidir ;;
-  -dvidir=* | --dvidir=* | --dvidi=* | --dvid=* | --dvi=* | --dv=*)
-    dvidir=$ac_optarg ;;
-
-  -enable-* | --enable-*)
-    ac_useropt=`expr "x$ac_option" : 'x-*enable-\([^=]*\)'`
-    # Reject names that are not valid shell variable names.
-    expr "x$ac_useropt" : ".*[^-+._$as_cr_alnum]" >/dev/null &&
-      as_fn_error $? "invalid feature name: $ac_useropt"
-    ac_useropt_orig=$ac_useropt
-    ac_useropt=`$as_echo "$ac_useropt" | sed 's/[-+.]/_/g'`
-    case $ac_user_opts in
-      *"
-"enable_$ac_useropt"
-"*) ;;
-      *) ac_unrecognized_opts="$ac_unrecognized_opts$ac_unrecognized_sep--enable-$ac_useropt_orig"
-	 ac_unrecognized_sep=', ';;
-    esac
-    eval enable_$ac_useropt=\$ac_optarg ;;
-
-  -exec-prefix | --exec_prefix | --exec-prefix | --exec-prefi \
-  | --exec-pref | --exec-pre | --exec-pr | --exec-p | --exec- \
-  | --exec | --exe | --ex)
-    ac_prev=exec_prefix ;;
-  -exec-prefix=* | --exec_prefix=* | --exec-prefix=* | --exec-prefi=* \
-  | --exec-pref=* | --exec-pre=* | --exec-pr=* | --exec-p=* | --exec-=* \
-  | --exec=* | --exe=* | --ex=*)
-    exec_prefix=$ac_optarg ;;
-
-  -gas | --gas | --ga | --g)
-    # Obsolete; use --with-gas.
-    with_gas=yes ;;
-
-  -help | --help | --hel | --he | -h)
-    ac_init_help=long ;;
-  -help=r* | --help=r* | --hel=r* | --he=r* | -hr*)
-    ac_init_help=recursive ;;
-  -help=s* | --help=s* | --hel=s* | --he=s* | -hs*)
-    ac_init_help=short ;;
-
-  -host | --host | --hos | --ho)
-    ac_prev=host_alias ;;
-  -host=* | --host=* | --hos=* | --ho=*)
-    host_alias=$ac_optarg ;;
-
-  -htmldir | --htmldir | --htmldi | --htmld | --html | --htm | --ht)
-    ac_prev=htmldir ;;
-  -htmldir=* | --htmldir=* | --htmldi=* | --htmld=* | --html=* | --htm=* \
-  | --ht=*)
-    htmldir=$ac_optarg ;;
-
-  -includedir | --includedir | --includedi | --included | --include \
-  | --includ | --inclu | --incl | --inc)
-    ac_prev=includedir ;;
-  -includedir=* | --includedir=* | --includedi=* | --included=* | --include=* \
-  | --includ=* | --inclu=* | --incl=* | --inc=*)
-    includedir=$ac_optarg ;;
-
-  -infodir | --infodir | --infodi | --infod | --info | --inf)
-    ac_prev=infodir ;;
-  -infodir=* | --infodir=* | --infodi=* | --infod=* | --info=* | --inf=*)
-    infodir=$ac_optarg ;;
-
-  -libdir | --libdir | --libdi | --libd)
-    ac_prev=libdir ;;
-  -libdir=* | --libdir=* | --libdi=* | --libd=*)
-    libdir=$ac_optarg ;;
-
-  -libexecdir | --libexecdir | --libexecdi | --libexecd | --libexec \
-  | --libexe | --libex | --libe)
-    ac_prev=libexecdir ;;
-  -libexecdir=* | --libexecdir=* | --libexecdi=* | --libexecd=* | --libexec=* \
-  | --libexe=* | --libex=* | --libe=*)
-    libexecdir=$ac_optarg ;;
-
-  -localedir | --localedir | --localedi | --localed | --locale)
-    ac_prev=localedir ;;
-  -localedir=* | --localedir=* | --localedi=* | --localed=* | --locale=*)
-    localedir=$ac_optarg ;;
-
-  -localstatedir | --localstatedir | --localstatedi | --localstated \
-  | --localstate | --localstat | --localsta | --localst | --locals)
-    ac_prev=localstatedir ;;
-  -localstatedir=* | --localstatedir=* | --localstatedi=* | --localstated=* \
-  | --localstate=* | --localstat=* | --localsta=* | --localst=* | --locals=*)
-    localstatedir=$ac_optarg ;;
-
-  -mandir | --mandir | --mandi | --mand | --man | --ma | --m)
-    ac_prev=mandir ;;
-  -mandir=* | --mandir=* | --mandi=* | --mand=* | --man=* | --ma=* | --m=*)
-    mandir=$ac_optarg ;;
-
-  -nfp | --nfp | --nf)
-    # Obsolete; use --without-fp.
-    with_fp=no ;;
-
-  -no-create | --no-create | --no-creat | --no-crea | --no-cre \
-  | --no-cr | --no-c | -n)
-    no_create=yes ;;
-
-  -no-recursion | --no-recursion | --no-recursio | --no-recursi \
-  | --no-recurs | --no-recur | --no-recu | --no-rec | --no-re | --no-r)
-    no_recursion=yes ;;
-
-  -oldincludedir | --oldincludedir | --oldincludedi | --oldincluded \
-  | --oldinclude | --oldinclud | --oldinclu | --oldincl | --oldinc \
-  | --oldin | --oldi | --old | --ol | --o)
-    ac_prev=oldincludedir ;;
-  -oldincludedir=* | --oldincludedir=* | --oldincludedi=* | --oldincluded=* \
-  | --oldinclude=* | --oldinclud=* | --oldinclu=* | --oldincl=* | --oldinc=* \
-  | --oldin=* | --oldi=* | --old=* | --ol=* | --o=*)
-    oldincludedir=$ac_optarg ;;
-
-  -prefix | --prefix | --prefi | --pref | --pre | --pr | --p)
-    ac_prev=prefix ;;
-  -prefix=* | --prefix=* | --prefi=* | --pref=* | --pre=* | --pr=* | --p=*)
-    prefix=$ac_optarg ;;
-
-  -program-prefix | --program-prefix | --program-prefi | --program-pref \
-  | --program-pre | --program-pr | --program-p)
-    ac_prev=program_prefix ;;
-  -program-prefix=* | --program-prefix=* | --program-prefi=* \
-  | --program-pref=* | --program-pre=* | --program-pr=* | --program-p=*)
-    program_prefix=$ac_optarg ;;
-
-  -program-suffix | --program-suffix | --program-suffi | --program-suff \
-  | --program-suf | --program-su | --program-s)
-    ac_prev=program_suffix ;;
-  -program-suffix=* | --program-suffix=* | --program-suffi=* \
-  | --program-suff=* | --program-suf=* | --program-su=* | --program-s=*)
-    program_suffix=$ac_optarg ;;
-
-  -program-transform-name | --program-transform-name \
-  | --program-transform-nam | --program-transform-na \
-  | --program-transform-n | --program-transform- \
-  | --program-transform | --program-transfor \
-  | --program-transfo | --program-transf \
-  | --program-trans | --program-tran \
-  | --progr-tra | --program-tr | --program-t)
-    ac_prev=program_transform_name ;;
-  -program-transform-name=* | --program-transform-name=* \
-  | --program-transform-nam=* | --program-transform-na=* \
-  | --program-transform-n=* | --program-transform-=* \
-  | --program-transform=* | --program-transfor=* \
-  | --program-transfo=* | --program-transf=* \
-  | --program-trans=* | --program-tran=* \
-  | --progr-tra=* | --program-tr=* | --program-t=*)
-    program_transform_name=$ac_optarg ;;
-
-  -pdfdir | --pdfdir | --pdfdi | --pdfd | --pdf | --pd)
-    ac_prev=pdfdir ;;
-  -pdfdir=* | --pdfdir=* | --pdfdi=* | --pdfd=* | --pdf=* | --pd=*)
-    pdfdir=$ac_optarg ;;
-
-  -psdir | --psdir | --psdi | --psd | --ps)
-    ac_prev=psdir ;;
-  -psdir=* | --psdir=* | --psdi=* | --psd=* | --ps=*)
-    psdir=$ac_optarg ;;
-
-  -q | -quiet | --quiet | --quie | --qui | --qu | --q \
-  | -silent | --silent | --silen | --sile | --sil)
-    silent=yes ;;
-
-  -sbindir | --sbindir | --sbindi | --sbind | --sbin | --sbi | --sb)
-    ac_prev=sbindir ;;
-  -sbindir=* | --sbindir=* | --sbindi=* | --sbind=* | --sbin=* \
-  | --sbi=* | --sb=*)
-    sbindir=$ac_optarg ;;
-
-  -sharedstatedir | --sharedstatedir | --sharedstatedi \
-  | --sharedstated | --sharedstate | --sharedstat | --sharedsta \
-  | --sharedst | --shareds | --shared | --share | --shar \
-  | --sha | --sh)
-    ac_prev=sharedstatedir ;;
-  -sharedstatedir=* | --sharedstatedir=* | --sharedstatedi=* \
-  | --sharedstated=* | --sharedstate=* | --sharedstat=* | --sharedsta=* \
-  | --sharedst=* | --shareds=* | --shared=* | --share=* | --shar=* \
-  | --sha=* | --sh=*)
-    sharedstatedir=$ac_optarg ;;
-
-  -site | --site | --sit)
-    ac_prev=site ;;
-  -site=* | --site=* | --sit=*)
-    site=$ac_optarg ;;
-
-  -srcdir | --srcdir | --srcdi | --srcd | --src | --sr)
-    ac_prev=srcdir ;;
-  -srcdir=* | --srcdir=* | --srcdi=* | --srcd=* | --src=* | --sr=*)
-    srcdir=$ac_optarg ;;
-
-  -sysconfdir | --sysconfdir | --sysconfdi | --sysconfd | --sysconf \
-  | --syscon | --sysco | --sysc | --sys | --sy)
-    ac_prev=sysconfdir ;;
-  -sysconfdir=* | --sysconfdir=* | --sysconfdi=* | --sysconfd=* | --sysconf=* \
-  | --syscon=* | --sysco=* | --sysc=* | --sys=* | --sy=*)
-    sysconfdir=$ac_optarg ;;
-
-  -target | --target | --targe | --targ | --tar | --ta | --t)
-    ac_prev=target_alias ;;
-  -target=* | --target=* | --targe=* | --targ=* | --tar=* | --ta=* | --t=*)
-    target_alias=$ac_optarg ;;
-
-  -v | -verbose | --verbose | --verbos | --verbo | --verb)
-    verbose=yes ;;
-
-  -version | --version | --versio | --versi | --vers | -V)
-    ac_init_version=: ;;
-
-  -with-* | --with-*)
-    ac_useropt=`expr "x$ac_option" : 'x-*with-\([^=]*\)'`
-    # Reject names that are not valid shell variable names.
-    expr "x$ac_useropt" : ".*[^-+._$as_cr_alnum]" >/dev/null &&
-      as_fn_error $? "invalid package name: $ac_useropt"
-    ac_useropt_orig=$ac_useropt
-    ac_useropt=`$as_echo "$ac_useropt" | sed 's/[-+.]/_/g'`
-    case $ac_user_opts in
-      *"
-"with_$ac_useropt"
-"*) ;;
-      *) ac_unrecognized_opts="$ac_unrecognized_opts$ac_unrecognized_sep--with-$ac_useropt_orig"
-	 ac_unrecognized_sep=', ';;
-    esac
-    eval with_$ac_useropt=\$ac_optarg ;;
-
-  -without-* | --without-*)
-    ac_useropt=`expr "x$ac_option" : 'x-*without-\(.*\)'`
-    # Reject names that are not valid shell variable names.
-    expr "x$ac_useropt" : ".*[^-+._$as_cr_alnum]" >/dev/null &&
-      as_fn_error $? "invalid package name: $ac_useropt"
-    ac_useropt_orig=$ac_useropt
-    ac_useropt=`$as_echo "$ac_useropt" | sed 's/[-+.]/_/g'`
-    case $ac_user_opts in
-      *"
-"with_$ac_useropt"
-"*) ;;
-      *) ac_unrecognized_opts="$ac_unrecognized_opts$ac_unrecognized_sep--without-$ac_useropt_orig"
-	 ac_unrecognized_sep=', ';;
-    esac
-    eval with_$ac_useropt=no ;;
-
-  --x)
-    # Obsolete; use --with-x.
-    with_x=yes ;;
-
-  -x-includes | --x-includes | --x-include | --x-includ | --x-inclu \
-  | --x-incl | --x-inc | --x-in | --x-i)
-    ac_prev=x_includes ;;
-  -x-includes=* | --x-includes=* | --x-include=* | --x-includ=* | --x-inclu=* \
-  | --x-incl=* | --x-inc=* | --x-in=* | --x-i=*)
-    x_includes=$ac_optarg ;;
-
-  -x-libraries | --x-libraries | --x-librarie | --x-librari \
-  | --x-librar | --x-libra | --x-libr | --x-lib | --x-li | --x-l)
-    ac_prev=x_libraries ;;
-  -x-libraries=* | --x-libraries=* | --x-librarie=* | --x-librari=* \
-  | --x-librar=* | --x-libra=* | --x-libr=* | --x-lib=* | --x-li=* | --x-l=*)
-    x_libraries=$ac_optarg ;;
-
-  -*) as_fn_error $? "unrecognized option: \`$ac_option'
-Try \`$0 --help' for more information"
-    ;;
-
-  *=*)
-    ac_envvar=`expr "x$ac_option" : 'x\([^=]*\)='`
-    # Reject names that are not valid shell variable names.
-    case $ac_envvar in #(
-      '' | [0-9]* | *[!_$as_cr_alnum]* )
-      as_fn_error $? "invalid variable name: \`$ac_envvar'" ;;
-    esac
-    eval $ac_envvar=\$ac_optarg
-    export $ac_envvar ;;
-
-  *)
-    # FIXME: should be removed in autoconf 3.0.
-    $as_echo "$as_me: WARNING: you should use --build, --host, --target" >&2
-    expr "x$ac_option" : ".*[^-._$as_cr_alnum]" >/dev/null &&
-      $as_echo "$as_me: WARNING: invalid host type: $ac_option" >&2
-    : "${build_alias=$ac_option} ${host_alias=$ac_option} ${target_alias=$ac_option}"
-    ;;
-
-  esac
-done
-
-if test -n "$ac_prev"; then
-  ac_option=--`echo $ac_prev | sed 's/_/-/g'`
-  as_fn_error $? "missing argument to $ac_option"
-fi
-
-if test -n "$ac_unrecognized_opts"; then
-  case $enable_option_checking in
-    no) ;;
-    fatal) as_fn_error $? "unrecognized options: $ac_unrecognized_opts" ;;
-    *)     $as_echo "$as_me: WARNING: unrecognized options: $ac_unrecognized_opts" >&2 ;;
-  esac
-fi
-
-# Check all directory arguments for consistency.
-for ac_var in	exec_prefix prefix bindir sbindir libexecdir datarootdir \
-		datadir sysconfdir sharedstatedir localstatedir includedir \
-		oldincludedir docdir infodir htmldir dvidir pdfdir psdir \
-		libdir localedir mandir
-do
-  eval ac_val=\$$ac_var
-  # Remove trailing slashes.
-  case $ac_val in
-    */ )
-      ac_val=`expr "X$ac_val" : 'X\(.*[^/]\)' \| "X$ac_val" : 'X\(.*\)'`
-      eval $ac_var=\$ac_val;;
-  esac
-  # Be sure to have absolute directory names.
-  case $ac_val in
-    [\\/$]* | ?:[\\/]* )  continue;;
-    NONE | '' ) case $ac_var in *prefix ) continue;; esac;;
-  esac
-  as_fn_error $? "expected an absolute directory name for --$ac_var: $ac_val"
-done
-
-# There might be people who depend on the old broken behavior: `$host'
-# used to hold the argument of --host etc.
-# FIXME: To remove some day.
-build=$build_alias
-host=$host_alias
-target=$target_alias
-
-# FIXME: To remove some day.
-if test "x$host_alias" != x; then
-  if test "x$build_alias" = x; then
-    cross_compiling=maybe
-    $as_echo "$as_me: WARNING: if you wanted to set the --build type, don't use --host.
-    If a cross compiler is detected then cross compile mode will be used" >&2
-  elif test "x$build_alias" != "x$host_alias"; then
-    cross_compiling=yes
-  fi
-fi
-
-ac_tool_prefix=
-test -n "$host_alias" && ac_tool_prefix=$host_alias-
-
-test "$silent" = yes && exec 6>/dev/null
-
-
-ac_pwd=`pwd` && test -n "$ac_pwd" &&
-ac_ls_di=`ls -di .` &&
-ac_pwd_ls_di=`cd "$ac_pwd" && ls -di .` ||
-  as_fn_error $? "working directory cannot be determined"
-test "X$ac_ls_di" = "X$ac_pwd_ls_di" ||
-  as_fn_error $? "pwd does not report name of working directory"
-
-
-# Find the source files, if location was not specified.
-if test -z "$srcdir"; then
-  ac_srcdir_defaulted=yes
-  # Try the directory containing this script, then the parent directory.
-  ac_confdir=`$as_dirname -- "$as_myself" ||
-$as_expr X"$as_myself" : 'X\(.*[^/]\)//*[^/][^/]*/*$' \| \
-	 X"$as_myself" : 'X\(//\)[^/]' \| \
-	 X"$as_myself" : 'X\(//\)$' \| \
-	 X"$as_myself" : 'X\(/\)' \| . 2>/dev/null ||
-$as_echo X"$as_myself" |
-    sed '/^X\(.*[^/]\)\/\/*[^/][^/]*\/*$/{
-	    s//\1/
-	    q
-	  }
-	  /^X\(\/\/\)[^/].*/{
-	    s//\1/
-	    q
-	  }
-	  /^X\(\/\/\)$/{
-	    s//\1/
-	    q
-	  }
-	  /^X\(\/\).*/{
-	    s//\1/
-	    q
-	  }
-	  s/.*/./; q'`
-  srcdir=$ac_confdir
-  if test ! -r "$srcdir/$ac_unique_file"; then
-    srcdir=..
-  fi
-else
-  ac_srcdir_defaulted=no
-fi
-if test ! -r "$srcdir/$ac_unique_file"; then
-  test "$ac_srcdir_defaulted" = yes && srcdir="$ac_confdir or .."
-  as_fn_error $? "cannot find sources ($ac_unique_file) in $srcdir"
-fi
-ac_msg="sources are in $srcdir, but \`cd $srcdir' does not work"
-ac_abs_confdir=`(
-	cd "$srcdir" && test -r "./$ac_unique_file" || as_fn_error $? "$ac_msg"
-	pwd)`
-# When building in place, set srcdir=.
-if test "$ac_abs_confdir" = "$ac_pwd"; then
-  srcdir=.
-fi
-# Remove unnecessary trailing slashes from srcdir.
-# Double slashes in file names in object file debugging info
-# mess up M-x gdb in Emacs.
-case $srcdir in
-*/) srcdir=`expr "X$srcdir" : 'X\(.*[^/]\)' \| "X$srcdir" : 'X\(.*\)'`;;
-esac
-for ac_var in $ac_precious_vars; do
-  eval ac_env_${ac_var}_set=\${${ac_var}+set}
-  eval ac_env_${ac_var}_value=\$${ac_var}
-  eval ac_cv_env_${ac_var}_set=\${${ac_var}+set}
-  eval ac_cv_env_${ac_var}_value=\$${ac_var}
-done
-
-#
-# Report the --help message.
-#
-if test "$ac_init_help" = "long"; then
-  # Omit some internal or obsolete options to make the list less imposing.
-  # This message is too long to be a string in the A/UX 3.1 sh.
-  cat <<_ACEOF
-\`configure' configures UDUNITS 2.2.17 to adapt to many kinds of systems.
-
-Usage: $0 [OPTION]... [VAR=VALUE]...
-
-To assign environment variables (e.g., CC, CFLAGS...), specify them as
-VAR=VALUE.  See below for descriptions of some of the useful variables.
-
-Defaults for the options are specified in brackets.
-
-Configuration:
-  -h, --help              display this help and exit
-      --help=short        display options specific to this package
-      --help=recursive    display the short help of all the included packages
-  -V, --version           display version information and exit
-  -q, --quiet, --silent   do not print \`checking ...' messages
-      --cache-file=FILE   cache test results in FILE [disabled]
-  -C, --config-cache      alias for \`--cache-file=config.cache'
-  -n, --no-create         do not create output files
-      --srcdir=DIR        find the sources in DIR [configure dir or \`..']
-
-Installation directories:
-  --prefix=PREFIX         install architecture-independent files in PREFIX
-                          [$ac_default_prefix]
-  --exec-prefix=EPREFIX   install architecture-dependent files in EPREFIX
-                          [PREFIX]
-
-By default, \`make install' will install all the files in
-\`$ac_default_prefix/bin', \`$ac_default_prefix/lib' etc.  You can specify
-an installation prefix other than \`$ac_default_prefix' using \`--prefix',
-for instance \`--prefix=\$HOME'.
-
-For better control, use the options below.
-
-Fine tuning of the installation directories:
-  --bindir=DIR            user executables [EPREFIX/bin]
-  --sbindir=DIR           system admin executables [EPREFIX/sbin]
-  --libexecdir=DIR        program executables [EPREFIX/libexec]
-  --sysconfdir=DIR        read-only single-machine data [PREFIX/etc]
-  --sharedstatedir=DIR    modifiable architecture-independent data [PREFIX/com]
-  --localstatedir=DIR     modifiable single-machine data [PREFIX/var]
-  --libdir=DIR            object code libraries [EPREFIX/lib]
-  --includedir=DIR        C header files [PREFIX/include]
-  --oldincludedir=DIR     C header files for non-gcc [/usr/include]
-  --datarootdir=DIR       read-only arch.-independent data root [PREFIX/share]
-  --datadir=DIR           read-only architecture-independent data [DATAROOTDIR]
-  --infodir=DIR           info documentation [DATAROOTDIR/info]
-  --localedir=DIR         locale-dependent data [DATAROOTDIR/locale]
-  --mandir=DIR            man documentation [DATAROOTDIR/man]
-  --docdir=DIR            documentation root [DATAROOTDIR/doc/udunits]
-  --htmldir=DIR           html documentation [DOCDIR]
-  --dvidir=DIR            dvi documentation [DOCDIR]
-  --pdfdir=DIR            pdf documentation [DOCDIR]
-  --psdir=DIR             ps documentation [DOCDIR]
-_ACEOF
-
-  cat <<\_ACEOF
-
-Program names:
-  --program-prefix=PREFIX            prepend PREFIX to installed program names
-  --program-suffix=SUFFIX            append SUFFIX to installed program names
-  --program-transform-name=PROGRAM   run sed PROGRAM on installed program names
-
-System types:
-  --build=BUILD     configure for building on BUILD [guessed]
-  --host=HOST       cross-compile to build programs to run on HOST [BUILD]
-_ACEOF
-fi
-
-if test -n "$ac_init_help"; then
-  case $ac_init_help in
-     short | recursive ) echo "Configuration of UDUNITS 2.2.17:";;
-   esac
-  cat <<\_ACEOF
-
-Optional Features:
-  --disable-option-checking  ignore unrecognized --enable/--with options
-  --disable-FEATURE       do not include FEATURE (same as --enable-FEATURE=no)
-  --enable-FEATURE[=ARG]  include FEATURE [ARG=yes]
-  --enable-coverage       Turn on code-coverage support
-  --enable-debug          Turn on debugging support
-  --disable-udunits-1     Turn off support for the UDUNITS-1 API
-                          [default=enabled]
-  --disable-dependency-tracking  speeds up one-time build
-  --enable-dependency-tracking   do not reject slow dependency extractors
-  --enable-shared[=PKGS]  build shared libraries [default=yes]
-  --enable-static[=PKGS]  build static libraries [default=yes]
-  --enable-fast-install[=PKGS]
-                          optimize for fast installation [default=yes]
-  --disable-libtool-lock  avoid locking (might break parallel builds)
-
-Optional Packages:
-  --with-PACKAGE[=ARG]    use PACKAGE [ARG=yes]
-  --without-PACKAGE       do not use PACKAGE (same as --with-PACKAGE=no)
-  --with-pic[=PKGS]       try to use only PIC/non-PIC objects [default=use
-                          both]
-  --with-gnu-ld           assume the C compiler uses GNU ld [default=no]
-  --with-sysroot=DIR Search for dependent libraries within DIR
-                        (or the compiler's sysroot if not specified).
-
-Some influential environment variables:
-  CC          C compiler command
-  CFLAGS      C compiler flags
-  LDFLAGS     linker flags, e.g. -L<lib dir> if you have libraries in a
-              nonstandard directory <lib dir>
-  LIBS        libraries to pass to the linker, e.g. -l<library>
-  CPPFLAGS    (Objective) C/C++ preprocessor flags, e.g. -I<include dir> if
-              you have headers in a nonstandard directory <include dir>
-  CPP         C preprocessor
-  YACC        The `Yet Another Compiler Compiler' implementation to use.
-              Defaults to the first program found out of: `bison -y', `byacc',
-              `yacc'.
-  YFLAGS      The list of arguments that will be passed by default to $YACC.
-              This script will default YFLAGS to the empty string to avoid a
-              default value of `-d' given by some make applications.
-  FC          Fortran compiler command
-  FCFLAGS     Fortran compiler flags
-
-Use these variables to override the choices made by `configure' or to help
-it to find libraries and programs with nonstandard names/locations.
-
-Report bugs to <support-udunits@unidata.ucar.edu>.
-_ACEOF
-ac_status=$?
-fi
-
-if test "$ac_init_help" = "recursive"; then
-  # If there are subdirs, report their specific --help.
-  for ac_dir in : $ac_subdirs_all; do test "x$ac_dir" = x: && continue
-    test -d "$ac_dir" ||
-      { cd "$srcdir" && ac_pwd=`pwd` && srcdir=. && test -d "$ac_dir"; } ||
-      continue
-    ac_builddir=.
-
-case "$ac_dir" in
-.) ac_dir_suffix= ac_top_builddir_sub=. ac_top_build_prefix= ;;
-*)
-  ac_dir_suffix=/`$as_echo "$ac_dir" | sed 's|^\.[\\/]||'`
-  # A ".." for each directory in $ac_dir_suffix.
-  ac_top_builddir_sub=`$as_echo "$ac_dir_suffix" | sed 's|/[^\\/]*|/..|g;s|/||'`
-  case $ac_top_builddir_sub in
-  "") ac_top_builddir_sub=. ac_top_build_prefix= ;;
-  *)  ac_top_build_prefix=$ac_top_builddir_sub/ ;;
-  esac ;;
-esac
-ac_abs_top_builddir=$ac_pwd
-ac_abs_builddir=$ac_pwd$ac_dir_suffix
-# for backward compatibility:
-ac_top_builddir=$ac_top_build_prefix
-
-case $srcdir in
-  .)  # We are building in place.
-    ac_srcdir=.
-    ac_top_srcdir=$ac_top_builddir_sub
-    ac_abs_top_srcdir=$ac_pwd ;;
-  [\\/]* | ?:[\\/]* )  # Absolute name.
-    ac_srcdir=$srcdir$ac_dir_suffix;
-    ac_top_srcdir=$srcdir
-    ac_abs_top_srcdir=$srcdir ;;
-  *) # Relative name.
-    ac_srcdir=$ac_top_build_prefix$srcdir$ac_dir_suffix
-    ac_top_srcdir=$ac_top_build_prefix$srcdir
-    ac_abs_top_srcdir=$ac_pwd/$srcdir ;;
-esac
-ac_abs_srcdir=$ac_abs_top_srcdir$ac_dir_suffix
-
-    cd "$ac_dir" || { ac_status=$?; continue; }
-    # Check for guested configure.
-    if test -f "$ac_srcdir/configure.gnu"; then
-      echo &&
-      $SHELL "$ac_srcdir/configure.gnu" --help=recursive
-    elif test -f "$ac_srcdir/configure"; then
-      echo &&
-      $SHELL "$ac_srcdir/configure" --help=recursive
-    else
-      $as_echo "$as_me: WARNING: no configuration information is in $ac_dir" >&2
-    fi || ac_status=$?
-    cd "$ac_pwd" || { ac_status=$?; break; }
-  done
-fi
-
-test -n "$ac_init_help" && exit $ac_status
-if $ac_init_version; then
-  cat <<\_ACEOF
-UDUNITS configure 2.2.17
-generated by GNU Autoconf 2.68
-
-Copyright (C) 2010 Free Software Foundation, Inc.
-This configure script is free software; the Free Software Foundation
-gives unlimited permission to copy, distribute and modify it.
-_ACEOF
-  exit
-fi
-
-## ------------------------ ##
-## Autoconf initialization. ##
-## ------------------------ ##
-
-# ac_fn_c_try_compile LINENO
-# --------------------------
-# Try to compile conftest.$ac_ext, and return whether this succeeded.
-ac_fn_c_try_compile ()
-{
-  as_lineno=${as_lineno-"$1"} as_lineno_stack=as_lineno_stack=$as_lineno_stack
-  rm -f conftest.$ac_objext
-  if { { ac_try="$ac_compile"
-case "(($ac_try" in
-  *\"* | *\`* | *\\*) ac_try_echo=\$ac_try;;
-  *) ac_try_echo=$ac_try;;
-esac
-eval ac_try_echo="\"\$as_me:${as_lineno-$LINENO}: $ac_try_echo\""
-$as_echo "$ac_try_echo"; } >&5
-  (eval "$ac_compile") 2>conftest.err
-  ac_status=$?
-  if test -s conftest.err; then
-    grep -v '^ *+' conftest.err >conftest.er1
-    cat conftest.er1 >&5
-    mv -f conftest.er1 conftest.err
-  fi
-  $as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5
-  test $ac_status = 0; } && {
-	 test -z "$ac_c_werror_flag" ||
-	 test ! -s conftest.err
-       } && test -s conftest.$ac_objext; then :
-  ac_retval=0
-else
-  $as_echo "$as_me: failed program was:" >&5
-sed 's/^/| /' conftest.$ac_ext >&5
-
-	ac_retval=1
-fi
-  eval $as_lineno_stack; ${as_lineno_stack:+:} unset as_lineno
-  as_fn_set_status $ac_retval
-
-} # ac_fn_c_try_compile
-
-# ac_fn_c_try_cpp LINENO
-# ----------------------
-# Try to preprocess conftest.$ac_ext, and return whether this succeeded.
-ac_fn_c_try_cpp ()
-{
-  as_lineno=${as_lineno-"$1"} as_lineno_stack=as_lineno_stack=$as_lineno_stack
-  if { { ac_try="$ac_cpp conftest.$ac_ext"
-case "(($ac_try" in
-  *\"* | *\`* | *\\*) ac_try_echo=\$ac_try;;
-  *) ac_try_echo=$ac_try;;
-esac
-eval ac_try_echo="\"\$as_me:${as_lineno-$LINENO}: $ac_try_echo\""
-$as_echo "$ac_try_echo"; } >&5
-  (eval "$ac_cpp conftest.$ac_ext") 2>conftest.err
-  ac_status=$?
-  if test -s conftest.err; then
-    grep -v '^ *+' conftest.err >conftest.er1
-    cat conftest.er1 >&5
-    mv -f conftest.er1 conftest.err
-  fi
-  $as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5
-  test $ac_status = 0; } > conftest.i && {
-	 test -z "$ac_c_preproc_warn_flag$ac_c_werror_flag" ||
-	 test ! -s conftest.err
-       }; then :
-  ac_retval=0
-else
-  $as_echo "$as_me: failed program was:" >&5
-sed 's/^/| /' conftest.$ac_ext >&5
-
-    ac_retval=1
-fi
-  eval $as_lineno_stack; ${as_lineno_stack:+:} unset as_lineno
-  as_fn_set_status $ac_retval
-
-} # ac_fn_c_try_cpp
-
-# ac_fn_c_try_link LINENO
-# -----------------------
-# Try to link conftest.$ac_ext, and return whether this succeeded.
-ac_fn_c_try_link ()
-{
-  as_lineno=${as_lineno-"$1"} as_lineno_stack=as_lineno_stack=$as_lineno_stack
-  rm -f conftest.$ac_objext conftest$ac_exeext
-  if { { ac_try="$ac_link"
-case "(($ac_try" in
-  *\"* | *\`* | *\\*) ac_try_echo=\$ac_try;;
-  *) ac_try_echo=$ac_try;;
-esac
-eval ac_try_echo="\"\$as_me:${as_lineno-$LINENO}: $ac_try_echo\""
-$as_echo "$ac_try_echo"; } >&5
-  (eval "$ac_link") 2>conftest.err
-  ac_status=$?
-  if test -s conftest.err; then
-    grep -v '^ *+' conftest.err >conftest.er1
-    cat conftest.er1 >&5
-    mv -f conftest.er1 conftest.err
-  fi
-  $as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5
-  test $ac_status = 0; } && {
-	 test -z "$ac_c_werror_flag" ||
-	 test ! -s conftest.err
-       } && test -s conftest$ac_exeext && {
-	 test "$cross_compiling" = yes ||
-	 $as_test_x conftest$ac_exeext
-       }; then :
-  ac_retval=0
-else
-  $as_echo "$as_me: failed program was:" >&5
-sed 's/^/| /' conftest.$ac_ext >&5
-
-	ac_retval=1
-fi
-  # Delete the IPA/IPO (Inter Procedural Analysis/Optimization) information
-  # created by the PGI compiler (conftest_ipa8_conftest.oo), as it would
-  # interfere with the next link command; also delete a directory that is
-  # left behind by Apple's compiler.  We do this before executing the actions.
-  rm -rf conftest.dSYM conftest_ipa8_conftest.oo
-  eval $as_lineno_stack; ${as_lineno_stack:+:} unset as_lineno
-  as_fn_set_status $ac_retval
-
-} # ac_fn_c_try_link
-
-# ac_fn_fc_try_compile LINENO
-# ---------------------------
-# Try to compile conftest.$ac_ext, and return whether this succeeded.
-ac_fn_fc_try_compile ()
-{
-  as_lineno=${as_lineno-"$1"} as_lineno_stack=as_lineno_stack=$as_lineno_stack
-  rm -f conftest.$ac_objext
-  if { { ac_try="$ac_compile"
-case "(($ac_try" in
-  *\"* | *\`* | *\\*) ac_try_echo=\$ac_try;;
-  *) ac_try_echo=$ac_try;;
-esac
-eval ac_try_echo="\"\$as_me:${as_lineno-$LINENO}: $ac_try_echo\""
-$as_echo "$ac_try_echo"; } >&5
-  (eval "$ac_compile") 2>conftest.err
-  ac_status=$?
-  if test -s conftest.err; then
-    grep -v '^ *+' conftest.err >conftest.er1
-    cat conftest.er1 >&5
-    mv -f conftest.er1 conftest.err
-  fi
-  $as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5
-  test $ac_status = 0; } && {
-	 test -z "$ac_fc_werror_flag" ||
-	 test ! -s conftest.err
-       } && test -s conftest.$ac_objext; then :
-  ac_retval=0
-else
-  $as_echo "$as_me: failed program was:" >&5
-sed 's/^/| /' conftest.$ac_ext >&5
-
-	ac_retval=1
-fi
-  eval $as_lineno_stack; ${as_lineno_stack:+:} unset as_lineno
-  as_fn_set_status $ac_retval
-
-} # ac_fn_fc_try_compile
-
-# ac_fn_c_try_run LINENO
-# ----------------------
-# Try to link conftest.$ac_ext, and return whether this succeeded. Assumes
-# that executables *can* be run.
-ac_fn_c_try_run ()
-{
-  as_lineno=${as_lineno-"$1"} as_lineno_stack=as_lineno_stack=$as_lineno_stack
-  if { { ac_try="$ac_link"
-case "(($ac_try" in
-  *\"* | *\`* | *\\*) ac_try_echo=\$ac_try;;
-  *) ac_try_echo=$ac_try;;
-esac
-eval ac_try_echo="\"\$as_me:${as_lineno-$LINENO}: $ac_try_echo\""
-$as_echo "$ac_try_echo"; } >&5
-  (eval "$ac_link") 2>&5
-  ac_status=$?
-  $as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5
-  test $ac_status = 0; } && { ac_try='./conftest$ac_exeext'
-  { { case "(($ac_try" in
-  *\"* | *\`* | *\\*) ac_try_echo=\$ac_try;;
-  *) ac_try_echo=$ac_try;;
-esac
-eval ac_try_echo="\"\$as_me:${as_lineno-$LINENO}: $ac_try_echo\""
-$as_echo "$ac_try_echo"; } >&5
-  (eval "$ac_try") 2>&5
-  ac_status=$?
-  $as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5
-  test $ac_status = 0; }; }; then :
-  ac_retval=0
-else
-  $as_echo "$as_me: program exited with status $ac_status" >&5
-       $as_echo "$as_me: failed program was:" >&5
-sed 's/^/| /' conftest.$ac_ext >&5
-
-       ac_retval=$ac_status
-fi
-  rm -rf conftest.dSYM conftest_ipa8_conftest.oo
-  eval $as_lineno_stack; ${as_lineno_stack:+:} unset as_lineno
-  as_fn_set_status $ac_retval
-
-} # ac_fn_c_try_run
-
-# ac_fn_c_check_header_mongrel LINENO HEADER VAR INCLUDES
-# -------------------------------------------------------
-# Tests whether HEADER exists, giving a warning if it cannot be compiled using
-# the include files in INCLUDES and setting the cache variable VAR
-# accordingly.
-ac_fn_c_check_header_mongrel ()
-{
-  as_lineno=${as_lineno-"$1"} as_lineno_stack=as_lineno_stack=$as_lineno_stack
-  if eval \${$3+:} false; then :
-  { $as_echo "$as_me:${as_lineno-$LINENO}: checking for $2" >&5
-$as_echo_n "checking for $2... " >&6; }
-if eval \${$3+:} false; then :
-  $as_echo_n "(cached) " >&6
-fi
-eval ac_res=\$$3
-	       { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_res" >&5
-$as_echo "$ac_res" >&6; }
-else
-  # Is the header compilable?
-{ $as_echo "$as_me:${as_lineno-$LINENO}: checking $2 usability" >&5
-$as_echo_n "checking $2 usability... " >&6; }
-cat confdefs.h - <<_ACEOF >conftest.$ac_ext
-/* end confdefs.h.  */
-$4
-#include <$2>
-_ACEOF
-if ac_fn_c_try_compile "$LINENO"; then :
-  ac_header_compiler=yes
-else
-  ac_header_compiler=no
-fi
-rm -f core conftest.err conftest.$ac_objext conftest.$ac_ext
-{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_header_compiler" >&5
-$as_echo "$ac_header_compiler" >&6; }
-
-# Is the header present?
-{ $as_echo "$as_me:${as_lineno-$LINENO}: checking $2 presence" >&5
-$as_echo_n "checking $2 presence... " >&6; }
-cat confdefs.h - <<_ACEOF >conftest.$ac_ext
-/* end confdefs.h.  */
-#include <$2>
-_ACEOF
-if ac_fn_c_try_cpp "$LINENO"; then :
-  ac_header_preproc=yes
-else
-  ac_header_preproc=no
-fi
-rm -f conftest.err conftest.i conftest.$ac_ext
-{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_header_preproc" >&5
-$as_echo "$ac_header_preproc" >&6; }
-
-# So?  What about this header?
-case $ac_header_compiler:$ac_header_preproc:$ac_c_preproc_warn_flag in #((
-  yes:no: )
-    { $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: $2: accepted by the compiler, rejected by the preprocessor!" >&5
-$as_echo "$as_me: WARNING: $2: accepted by the compiler, rejected by the preprocessor!" >&2;}
-    { $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: $2: proceeding with the compiler's result" >&5
-$as_echo "$as_me: WARNING: $2: proceeding with the compiler's result" >&2;}
-    ;;
-  no:yes:* )
-    { $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: $2: present but cannot be compiled" >&5
-$as_echo "$as_me: WARNING: $2: present but cannot be compiled" >&2;}
-    { $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: $2:     check for missing prerequisite headers?" >&5
-$as_echo "$as_me: WARNING: $2:     check for missing prerequisite headers?" >&2;}
-    { $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: $2: see the Autoconf documentation" >&5
-$as_echo "$as_me: WARNING: $2: see the Autoconf documentation" >&2;}
-    { $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: $2:     section \"Present But Cannot Be Compiled\"" >&5
-$as_echo "$as_me: WARNING: $2:     section \"Present But Cannot Be Compiled\"" >&2;}
-    { $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: $2: proceeding with the compiler's result" >&5
-$as_echo "$as_me: WARNING: $2: proceeding with the compiler's result" >&2;}
-( $as_echo "## ----------------------------------------------- ##
-## Report this to support-udunits@unidata.ucar.edu ##
-## ----------------------------------------------- ##"
-     ) | sed "s/^/$as_me: WARNING:     /" >&2
-    ;;
-esac
-  { $as_echo "$as_me:${as_lineno-$LINENO}: checking for $2" >&5
-$as_echo_n "checking for $2... " >&6; }
-if eval \${$3+:} false; then :
-  $as_echo_n "(cached) " >&6
-else
-  eval "$3=\$ac_header_compiler"
-fi
-eval ac_res=\$$3
-	       { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_res" >&5
-$as_echo "$ac_res" >&6; }
-fi
-  eval $as_lineno_stack; ${as_lineno_stack:+:} unset as_lineno
-
-} # ac_fn_c_check_header_mongrel
-
-# ac_fn_c_check_header_compile LINENO HEADER VAR INCLUDES
-# -------------------------------------------------------
-# Tests whether HEADER exists and can be compiled using the include files in
-# INCLUDES, setting the cache variable VAR accordingly.
-ac_fn_c_check_header_compile ()
-{
-  as_lineno=${as_lineno-"$1"} as_lineno_stack=as_lineno_stack=$as_lineno_stack
-  { $as_echo "$as_me:${as_lineno-$LINENO}: checking for $2" >&5
-$as_echo_n "checking for $2... " >&6; }
-if eval \${$3+:} false; then :
-  $as_echo_n "(cached) " >&6
-else
-  cat confdefs.h - <<_ACEOF >conftest.$ac_ext
-/* end confdefs.h.  */
-$4
-#include <$2>
-_ACEOF
-if ac_fn_c_try_compile "$LINENO"; then :
-  eval "$3=yes"
-else
-  eval "$3=no"
-fi
-rm -f core conftest.err conftest.$ac_objext conftest.$ac_ext
-fi
-eval ac_res=\$$3
-	       { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_res" >&5
-$as_echo "$ac_res" >&6; }
-  eval $as_lineno_stack; ${as_lineno_stack:+:} unset as_lineno
-
-} # ac_fn_c_check_header_compile
-
-# ac_fn_c_check_type LINENO TYPE VAR INCLUDES
-# -------------------------------------------
-# Tests whether TYPE exists after having included INCLUDES, setting cache
-# variable VAR accordingly.
-ac_fn_c_check_type ()
-{
-  as_lineno=${as_lineno-"$1"} as_lineno_stack=as_lineno_stack=$as_lineno_stack
-  { $as_echo "$as_me:${as_lineno-$LINENO}: checking for $2" >&5
-$as_echo_n "checking for $2... " >&6; }
-if eval \${$3+:} false; then :
-  $as_echo_n "(cached) " >&6
-else
-  eval "$3=no"
-  cat confdefs.h - <<_ACEOF >conftest.$ac_ext
-/* end confdefs.h.  */
-$4
-int
-main ()
-{
-if (sizeof ($2))
-	 return 0;
-  ;
-  return 0;
-}
-_ACEOF
-if ac_fn_c_try_compile "$LINENO"; then :
-  cat confdefs.h - <<_ACEOF >conftest.$ac_ext
-/* end confdefs.h.  */
-$4
-int
-main ()
-{
-if (sizeof (($2)))
-	    return 0;
-  ;
-  return 0;
-}
-_ACEOF
-if ac_fn_c_try_compile "$LINENO"; then :
-
-else
-  eval "$3=yes"
-fi
-rm -f core conftest.err conftest.$ac_objext conftest.$ac_ext
-fi
-rm -f core conftest.err conftest.$ac_objext conftest.$ac_ext
-fi
-eval ac_res=\$$3
-	       { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_res" >&5
-$as_echo "$ac_res" >&6; }
-  eval $as_lineno_stack; ${as_lineno_stack:+:} unset as_lineno
-
-} # ac_fn_c_check_type
-
-# ac_fn_c_check_func LINENO FUNC VAR
-# ----------------------------------
-# Tests whether FUNC exists, setting the cache variable VAR accordingly
-ac_fn_c_check_func ()
-{
-  as_lineno=${as_lineno-"$1"} as_lineno_stack=as_lineno_stack=$as_lineno_stack
-  { $as_echo "$as_me:${as_lineno-$LINENO}: checking for $2" >&5
-$as_echo_n "checking for $2... " >&6; }
-if eval \${$3+:} false; then :
-  $as_echo_n "(cached) " >&6
-else
-  cat confdefs.h - <<_ACEOF >conftest.$ac_ext
-/* end confdefs.h.  */
-/* Define $2 to an innocuous variant, in case <limits.h> declares $2.
-   For example, HP-UX 11i <limits.h> declares gettimeofday.  */
-#define $2 innocuous_$2
-
-/* System header to define __stub macros and hopefully few prototypes,
-    which can conflict with char $2 (); below.
-    Prefer <limits.h> to <assert.h> if __STDC__ is defined, since
-    <limits.h> exists even on freestanding compilers.  */
-
-#ifdef __STDC__
-# include <limits.h>
-#else
-# include <assert.h>
-#endif
-
-#undef $2
-
-/* Override any GCC internal prototype to avoid an error.
-   Use char because int might match the return type of a GCC
-   builtin and then its argument prototype would still apply.  */
-#ifdef __cplusplus
-extern "C"
-#endif
-char $2 ();
-/* The GNU C library defines this for functions which it implements
-    to always fail with ENOSYS.  Some functions are actually named
-    something starting with __ and the normal name is an alias.  */
-#if defined __stub_$2 || defined __stub___$2
-choke me
-#endif
-
-int
-main ()
-{
-return $2 ();
-  ;
-  return 0;
-}
-_ACEOF
-if ac_fn_c_try_link "$LINENO"; then :
-  eval "$3=yes"
-else
-  eval "$3=no"
-fi
-rm -f core conftest.err conftest.$ac_objext \
-    conftest$ac_exeext conftest.$ac_ext
-fi
-eval ac_res=\$$3
-	       { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_res" >&5
-$as_echo "$ac_res" >&6; }
-  eval $as_lineno_stack; ${as_lineno_stack:+:} unset as_lineno
-
-} # ac_fn_c_check_func
-
-# ac_fn_fc_try_link LINENO
-# ------------------------
-# Try to link conftest.$ac_ext, and return whether this succeeded.
-ac_fn_fc_try_link ()
-{
-  as_lineno=${as_lineno-"$1"} as_lineno_stack=as_lineno_stack=$as_lineno_stack
-  rm -f conftest.$ac_objext conftest$ac_exeext
-  if { { ac_try="$ac_link"
-case "(($ac_try" in
-  *\"* | *\`* | *\\*) ac_try_echo=\$ac_try;;
-  *) ac_try_echo=$ac_try;;
-esac
-eval ac_try_echo="\"\$as_me:${as_lineno-$LINENO}: $ac_try_echo\""
-$as_echo "$ac_try_echo"; } >&5
-  (eval "$ac_link") 2>conftest.err
-  ac_status=$?
-  if test -s conftest.err; then
-    grep -v '^ *+' conftest.err >conftest.er1
-    cat conftest.er1 >&5
-    mv -f conftest.er1 conftest.err
-  fi
-  $as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5
-  test $ac_status = 0; } && {
-	 test -z "$ac_fc_werror_flag" ||
-	 test ! -s conftest.err
-       } && test -s conftest$ac_exeext && {
-	 test "$cross_compiling" = yes ||
-	 $as_test_x conftest$ac_exeext
-       }; then :
-  ac_retval=0
-else
-  $as_echo "$as_me: failed program was:" >&5
-sed 's/^/| /' conftest.$ac_ext >&5
-
-	ac_retval=1
-fi
-  # Delete the IPA/IPO (Inter Procedural Analysis/Optimization) information
-  # created by the PGI compiler (conftest_ipa8_conftest.oo), as it would
-  # interfere with the next link command; also delete a directory that is
-  # left behind by Apple's compiler.  We do this before executing the actions.
-  rm -rf conftest.dSYM conftest_ipa8_conftest.oo
-  eval $as_lineno_stack; ${as_lineno_stack:+:} unset as_lineno
-  as_fn_set_status $ac_retval
-
-} # ac_fn_fc_try_link
-cat >config.log <<_ACEOF
-This file contains any messages produced by compilers while
-running configure, to aid debugging if configure makes a mistake.
-
-It was created by UDUNITS $as_me 2.2.17, which was
-generated by GNU Autoconf 2.68.  Invocation command line was
-
-  $ $0 $@
-
-_ACEOF
-exec 5>>config.log
-{
-cat <<_ASUNAME
-## --------- ##
-## Platform. ##
-## --------- ##
-
-hostname = `(hostname || uname -n) 2>/dev/null | sed 1q`
-uname -m = `(uname -m) 2>/dev/null || echo unknown`
-uname -r = `(uname -r) 2>/dev/null || echo unknown`
-uname -s = `(uname -s) 2>/dev/null || echo unknown`
-uname -v = `(uname -v) 2>/dev/null || echo unknown`
-
-/usr/bin/uname -p = `(/usr/bin/uname -p) 2>/dev/null || echo unknown`
-/bin/uname -X     = `(/bin/uname -X) 2>/dev/null     || echo unknown`
-
-/bin/arch              = `(/bin/arch) 2>/dev/null              || echo unknown`
-/usr/bin/arch -k       = `(/usr/bin/arch -k) 2>/dev/null       || echo unknown`
-/usr/convex/getsysinfo = `(/usr/convex/getsysinfo) 2>/dev/null || echo unknown`
-/usr/bin/hostinfo      = `(/usr/bin/hostinfo) 2>/dev/null      || echo unknown`
-/bin/machine           = `(/bin/machine) 2>/dev/null           || echo unknown`
-/usr/bin/oslevel       = `(/usr/bin/oslevel) 2>/dev/null       || echo unknown`
-/bin/universe          = `(/bin/universe) 2>/dev/null          || echo unknown`
-
-_ASUNAME
-
-as_save_IFS=$IFS; IFS=$PATH_SEPARATOR
-for as_dir in $PATH
-do
-  IFS=$as_save_IFS
-  test -z "$as_dir" && as_dir=.
-    $as_echo "PATH: $as_dir"
-  done
-IFS=$as_save_IFS
-
-} >&5
-
-cat >&5 <<_ACEOF
-
-
-## ----------- ##
-## Core tests. ##
-## ----------- ##
-
-_ACEOF
-
-
-# Keep a trace of the command line.
-# Strip out --no-create and --no-recursion so they do not pile up.
-# Strip out --silent because we don't want to record it for future runs.
-# Also quote any args containing shell meta-characters.
-# Make two passes to allow for proper duplicate-argument suppression.
-ac_configure_args=
-ac_configure_args0=
-ac_configure_args1=
-ac_must_keep_next=false
-for ac_pass in 1 2
-do
-  for ac_arg
-  do
-    case $ac_arg in
-    -no-create | --no-c* | -n | -no-recursion | --no-r*) continue ;;
-    -q | -quiet | --quiet | --quie | --qui | --qu | --q \
-    | -silent | --silent | --silen | --sile | --sil)
-      continue ;;
-    *\'*)
-      ac_arg=`$as_echo "$ac_arg" | sed "s/'/'\\\\\\\\''/g"` ;;
-    esac
-    case $ac_pass in
-    1) as_fn_append ac_configure_args0 " '$ac_arg'" ;;
-    2)
-      as_fn_append ac_configure_args1 " '$ac_arg'"
-      if test $ac_must_keep_next = true; then
-	ac_must_keep_next=false # Got value, back to normal.
-      else
-	case $ac_arg in
-	  *=* | --config-cache | -C | -disable-* | --disable-* \
-	  | -enable-* | --enable-* | -gas | --g* | -nfp | --nf* \
-	  | -q | -quiet | --q* | -silent | --sil* | -v | -verb* \
-	  | -with-* | --with-* | -without-* | --without-* | --x)
-	    case "$ac_configure_args0 " in
-	      "$ac_configure_args1"*" '$ac_arg' "* ) continue ;;
-	    esac
-	    ;;
-	  -* ) ac_must_keep_next=true ;;
-	esac
-      fi
-      as_fn_append ac_configure_args " '$ac_arg'"
-      ;;
-    esac
-  done
-done
-{ ac_configure_args0=; unset ac_configure_args0;}
-{ ac_configure_args1=; unset ac_configure_args1;}
-
-# When interrupted or exit'd, cleanup temporary files, and complete
-# config.log.  We remove comments because anyway the quotes in there
-# would cause problems or look ugly.
-# WARNING: Use '\'' to represent an apostrophe within the trap.
-# WARNING: Do not start the trap code with a newline, due to a FreeBSD 4.0 bug.
-trap 'exit_status=$?
-  # Save into config.log some information that might help in debugging.
-  {
-    echo
-
-    $as_echo "## ---------------- ##
-## Cache variables. ##
-## ---------------- ##"
-    echo
-    # The following way of writing the cache mishandles newlines in values,
-(
-  for ac_var in `(set) 2>&1 | sed -n '\''s/^\([a-zA-Z_][a-zA-Z0-9_]*\)=.*/\1/p'\''`; do
-    eval ac_val=\$$ac_var
-    case $ac_val in #(
-    *${as_nl}*)
-      case $ac_var in #(
-      *_cv_*) { $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: cache variable $ac_var contains a newline" >&5
-$as_echo "$as_me: WARNING: cache variable $ac_var contains a newline" >&2;} ;;
-      esac
-      case $ac_var in #(
-      _ | IFS | as_nl) ;; #(
-      BASH_ARGV | BASH_SOURCE) eval $ac_var= ;; #(
-      *) { eval $ac_var=; unset $ac_var;} ;;
-      esac ;;
-    esac
-  done
-  (set) 2>&1 |
-    case $as_nl`(ac_space='\'' '\''; set) 2>&1` in #(
-    *${as_nl}ac_space=\ *)
-      sed -n \
-	"s/'\''/'\''\\\\'\'''\''/g;
-	  s/^\\([_$as_cr_alnum]*_cv_[_$as_cr_alnum]*\\)=\\(.*\\)/\\1='\''\\2'\''/p"
-      ;; #(
-    *)
-      sed -n "/^[_$as_cr_alnum]*_cv_[_$as_cr_alnum]*=/p"
-      ;;
-    esac |
-    sort
-)
-    echo
-
-    $as_echo "## ----------------- ##
-## Output variables. ##
-## ----------------- ##"
-    echo
-    for ac_var in $ac_subst_vars
-    do
-      eval ac_val=\$$ac_var
-      case $ac_val in
-      *\'\''*) ac_val=`$as_echo "$ac_val" | sed "s/'\''/'\''\\\\\\\\'\'''\''/g"`;;
-      esac
-      $as_echo "$ac_var='\''$ac_val'\''"
-    done | sort
-    echo
-
-    if test -n "$ac_subst_files"; then
-      $as_echo "## ------------------- ##
-## File substitutions. ##
-## ------------------- ##"
-      echo
-      for ac_var in $ac_subst_files
-      do
-	eval ac_val=\$$ac_var
-	case $ac_val in
-	*\'\''*) ac_val=`$as_echo "$ac_val" | sed "s/'\''/'\''\\\\\\\\'\'''\''/g"`;;
-	esac
-	$as_echo "$ac_var='\''$ac_val'\''"
-      done | sort
-      echo
-    fi
-
-    if test -s confdefs.h; then
-      $as_echo "## ----------- ##
-## confdefs.h. ##
-## ----------- ##"
-      echo
-      cat confdefs.h
-      echo
-    fi
-    test "$ac_signal" != 0 &&
-      $as_echo "$as_me: caught signal $ac_signal"
-    $as_echo "$as_me: exit $exit_status"
-  } >&5
-  rm -f core *.core core.conftest.* &&
-    rm -f -r conftest* confdefs* conf$$* $ac_clean_files &&
-    exit $exit_status
-' 0
-for ac_signal in 1 2 13 15; do
-  trap 'ac_signal='$ac_signal'; as_fn_exit 1' $ac_signal
-done
-ac_signal=0
-
-# confdefs.h avoids OS command line length limits that DEFS can exceed.
-rm -f -r conftest* confdefs.h
-
-$as_echo "/* confdefs.h */" > confdefs.h
-
-# Predefined preprocessor variables.
-
-cat >>confdefs.h <<_ACEOF
-#define PACKAGE_NAME "$PACKAGE_NAME"
-_ACEOF
-
-cat >>confdefs.h <<_ACEOF
-#define PACKAGE_TARNAME "$PACKAGE_TARNAME"
-_ACEOF
-
-cat >>confdefs.h <<_ACEOF
-#define PACKAGE_VERSION "$PACKAGE_VERSION"
-_ACEOF
-
-cat >>confdefs.h <<_ACEOF
-#define PACKAGE_STRING "$PACKAGE_STRING"
-_ACEOF
-
-cat >>confdefs.h <<_ACEOF
-#define PACKAGE_BUGREPORT "$PACKAGE_BUGREPORT"
-_ACEOF
-
-cat >>confdefs.h <<_ACEOF
-#define PACKAGE_URL "$PACKAGE_URL"
-_ACEOF
-
-
-# Let the site file select an alternate cache file if it wants to.
-# Prefer an explicitly selected file to automatically selected ones.
-ac_site_file1=NONE
-ac_site_file2=NONE
-if test -n "$CONFIG_SITE"; then
-  # We do not want a PATH search for config.site.
-  case $CONFIG_SITE in #((
-    -*)  ac_site_file1=./$CONFIG_SITE;;
-    */*) ac_site_file1=$CONFIG_SITE;;
-    *)   ac_site_file1=./$CONFIG_SITE;;
-  esac
-elif test "x$prefix" != xNONE; then
-  ac_site_file1=$prefix/share/config.site
-  ac_site_file2=$prefix/etc/config.site
-else
-  ac_site_file1=$ac_default_prefix/share/config.site
-  ac_site_file2=$ac_default_prefix/etc/config.site
-fi
-for ac_site_file in "$ac_site_file1" "$ac_site_file2"
-do
-  test "x$ac_site_file" = xNONE && continue
-  if test /dev/null != "$ac_site_file" && test -r "$ac_site_file"; then
-    { $as_echo "$as_me:${as_lineno-$LINENO}: loading site script $ac_site_file" >&5
-$as_echo "$as_me: loading site script $ac_site_file" >&6;}
-    sed 's/^/| /' "$ac_site_file" >&5
-    . "$ac_site_file" \
-      || { { $as_echo "$as_me:${as_lineno-$LINENO}: error: in \`$ac_pwd':" >&5
-$as_echo "$as_me: error: in \`$ac_pwd':" >&2;}
-as_fn_error $? "failed to load site script $ac_site_file
-See \`config.log' for more details" "$LINENO" 5; }
-  fi
-done
-
-if test -r "$cache_file"; then
-  # Some versions of bash will fail to source /dev/null (special files
-  # actually), so we avoid doing that.  DJGPP emulates it as a regular file.
-  if test /dev/null != "$cache_file" && test -f "$cache_file"; then
-    { $as_echo "$as_me:${as_lineno-$LINENO}: loading cache $cache_file" >&5
-$as_echo "$as_me: loading cache $cache_file" >&6;}
-    case $cache_file in
-      [\\/]* | ?:[\\/]* ) . "$cache_file";;
-      *)                      . "./$cache_file";;
-    esac
-  fi
-else
-  { $as_echo "$as_me:${as_lineno-$LINENO}: creating cache $cache_file" >&5
-$as_echo "$as_me: creating cache $cache_file" >&6;}
-  >$cache_file
-fi
-
-# Check that the precious variables saved in the cache have kept the same
-# value.
-ac_cache_corrupted=false
-for ac_var in $ac_precious_vars; do
-  eval ac_old_set=\$ac_cv_env_${ac_var}_set
-  eval ac_new_set=\$ac_env_${ac_var}_set
-  eval ac_old_val=\$ac_cv_env_${ac_var}_value
-  eval ac_new_val=\$ac_env_${ac_var}_value
-  case $ac_old_set,$ac_new_set in
-    set,)
-      { $as_echo "$as_me:${as_lineno-$LINENO}: error: \`$ac_var' was set to \`$ac_old_val' in the previous run" >&5
-$as_echo "$as_me: error: \`$ac_var' was set to \`$ac_old_val' in the previous run" >&2;}
-      ac_cache_corrupted=: ;;
-    ,set)
-      { $as_echo "$as_me:${as_lineno-$LINENO}: error: \`$ac_var' was not set in the previous run" >&5
-$as_echo "$as_me: error: \`$ac_var' was not set in the previous run" >&2;}
-      ac_cache_corrupted=: ;;
-    ,);;
-    *)
-      if test "x$ac_old_val" != "x$ac_new_val"; then
-	# differences in whitespace do not lead to failure.
-	ac_old_val_w=`echo x $ac_old_val`
-	ac_new_val_w=`echo x $ac_new_val`
-	if test "$ac_old_val_w" != "$ac_new_val_w"; then
-	  { $as_echo "$as_me:${as_lineno-$LINENO}: error: \`$ac_var' has changed since the previous run:" >&5
-$as_echo "$as_me: error: \`$ac_var' has changed since the previous run:" >&2;}
-	  ac_cache_corrupted=:
-	else
-	  { $as_echo "$as_me:${as_lineno-$LINENO}: warning: ignoring whitespace changes in \`$ac_var' since the previous run:" >&5
-$as_echo "$as_me: warning: ignoring whitespace changes in \`$ac_var' since the previous run:" >&2;}
-	  eval $ac_var=\$ac_old_val
-	fi
-	{ $as_echo "$as_me:${as_lineno-$LINENO}:   former value:  \`$ac_old_val'" >&5
-$as_echo "$as_me:   former value:  \`$ac_old_val'" >&2;}
-	{ $as_echo "$as_me:${as_lineno-$LINENO}:   current value: \`$ac_new_val'" >&5
-$as_echo "$as_me:   current value: \`$ac_new_val'" >&2;}
-      fi;;
-  esac
-  # Pass precious variables to config.status.
-  if test "$ac_new_set" = set; then
-    case $ac_new_val in
-    *\'*) ac_arg=$ac_var=`$as_echo "$ac_new_val" | sed "s/'/'\\\\\\\\''/g"` ;;
-    *) ac_arg=$ac_var=$ac_new_val ;;
-    esac
-    case " $ac_configure_args " in
-      *" '$ac_arg' "*) ;; # Avoid dups.  Use of quotes ensures accuracy.
-      *) as_fn_append ac_configure_args " '$ac_arg'" ;;
-    esac
-  fi
-done
-if $ac_cache_corrupted; then
-  { $as_echo "$as_me:${as_lineno-$LINENO}: error: in \`$ac_pwd':" >&5
-$as_echo "$as_me: error: in \`$ac_pwd':" >&2;}
-  { $as_echo "$as_me:${as_lineno-$LINENO}: error: changes in the environment can compromise the build" >&5
-$as_echo "$as_me: error: changes in the environment can compromise the build" >&2;}
-  as_fn_error $? "run \`make distclean' and/or \`rm $cache_file' and start over" "$LINENO" 5
-fi
-## -------------------- ##
-## Main body of script. ##
-## -------------------- ##
-
-ac_ext=c
-ac_cpp='$CPP $CPPFLAGS'
-ac_compile='$CC -c $CFLAGS $CPPFLAGS conftest.$ac_ext >&5'
-ac_link='$CC -o conftest$ac_exeext $CFLAGS $CPPFLAGS $LDFLAGS conftest.$ac_ext $LIBS >&5'
-ac_compiler_gnu=$ac_cv_c_compiler_gnu
-
-
-
-ac_aux_dir=
-for ac_dir in build-aux "$srcdir"/build-aux; do
-  if test -f "$ac_dir/install-sh"; then
-    ac_aux_dir=$ac_dir
-    ac_install_sh="$ac_aux_dir/install-sh -c"
-    break
-  elif test -f "$ac_dir/install.sh"; then
-    ac_aux_dir=$ac_dir
-    ac_install_sh="$ac_aux_dir/install.sh -c"
-    break
-  elif test -f "$ac_dir/shtool"; then
-    ac_aux_dir=$ac_dir
-    ac_install_sh="$ac_aux_dir/shtool install -c"
-    break
-  fi
-done
-if test -z "$ac_aux_dir"; then
-  as_fn_error $? "cannot find install-sh, install.sh, or shtool in build-aux \"$srcdir\"/build-aux" "$LINENO" 5
-fi
-
-# These three variables are undocumented and unsupported,
-# and are intended to be withdrawn in a future Autoconf release.
-# They can cause serious problems if a builder's source tree is in a directory
-# whose full name contains unusual characters.
-ac_config_guess="$SHELL $ac_aux_dir/config.guess"  # Please don't use this var.
-ac_config_sub="$SHELL $ac_aux_dir/config.sub"  # Please don't use this var.
-ac_configure="$SHELL $ac_aux_dir/configure"  # Please don't use this var.
-
-
-
-am__api_version='1.11'
-
-# Find a good install program.  We prefer a C program (faster),
-# so one script is as good as another.  But avoid the broken or
-# incompatible versions:
-# SysV /etc/install, /usr/sbin/install
-# SunOS /usr/etc/install
-# IRIX /sbin/install
-# AIX /bin/install
-# AmigaOS /C/install, which installs bootblocks on floppy discs
-# AIX 4 /usr/bin/installbsd, which doesn't work without a -g flag
-# AFS /usr/afsws/bin/install, which mishandles nonexistent args
-# SVR4 /usr/ucb/install, which tries to use the nonexistent group "staff"
-# OS/2's system install, which has a completely different semantic
-# ./install, which can be erroneously created by make from ./install.sh.
-# Reject install programs that cannot install multiple files.
-{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for a BSD-compatible install" >&5
-$as_echo_n "checking for a BSD-compatible install... " >&6; }
-if test -z "$INSTALL"; then
-if ${ac_cv_path_install+:} false; then :
-  $as_echo_n "(cached) " >&6
-else
-  as_save_IFS=$IFS; IFS=$PATH_SEPARATOR
-for as_dir in $PATH
-do
-  IFS=$as_save_IFS
-  test -z "$as_dir" && as_dir=.
-    # Account for people who put trailing slashes in PATH elements.
-case $as_dir/ in #((
-  ./ | .// | /[cC]/* | \
-  /etc/* | /usr/sbin/* | /usr/etc/* | /sbin/* | /usr/afsws/bin/* | \
-  ?:[\\/]os2[\\/]install[\\/]* | ?:[\\/]OS2[\\/]INSTALL[\\/]* | \
-  /usr/ucb/* ) ;;
-  *)
-    # OSF1 and SCO ODT 3.0 have their own names for install.
-    # Don't use installbsd from OSF since it installs stuff as root
-    # by default.
-    for ac_prog in ginstall scoinst install; do
-      for ac_exec_ext in '' $ac_executable_extensions; do
-	if { test -f "$as_dir/$ac_prog$ac_exec_ext" && $as_test_x "$as_dir/$ac_prog$ac_exec_ext"; }; then
-	  if test $ac_prog = install &&
-	    grep dspmsg "$as_dir/$ac_prog$ac_exec_ext" >/dev/null 2>&1; then
-	    # AIX install.  It has an incompatible calling convention.
-	    :
-	  elif test $ac_prog = install &&
-	    grep pwplus "$as_dir/$ac_prog$ac_exec_ext" >/dev/null 2>&1; then
-	    # program-specific install script used by HP pwplus--don't use.
-	    :
-	  else
-	    rm -rf conftest.one conftest.two conftest.dir
-	    echo one > conftest.one
-	    echo two > conftest.two
-	    mkdir conftest.dir
-	    if "$as_dir/$ac_prog$ac_exec_ext" -c conftest.one conftest.two "`pwd`/conftest.dir" &&
-	      test -s conftest.one && test -s conftest.two &&
-	      test -s conftest.dir/conftest.one &&
-	      test -s conftest.dir/conftest.two
-	    then
-	      ac_cv_path_install="$as_dir/$ac_prog$ac_exec_ext -c"
-	      break 3
-	    fi
-	  fi
-	fi
-      done
-    done
-    ;;
-esac
-
-  done
-IFS=$as_save_IFS
-
-rm -rf conftest.one conftest.two conftest.dir
-
-fi
-  if test "${ac_cv_path_install+set}" = set; then
-    INSTALL=$ac_cv_path_install
-  else
-    # As a last resort, use the slow shell script.  Don't cache a
-    # value for INSTALL within a source directory, because that will
-    # break other packages using the cache if that directory is
-    # removed, or if the value is a relative name.
-    INSTALL=$ac_install_sh
-  fi
-fi
-{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $INSTALL" >&5
-$as_echo "$INSTALL" >&6; }
-
-# Use test -z because SunOS4 sh mishandles braces in ${var-val}.
-# It thinks the first close brace ends the variable substitution.
-test -z "$INSTALL_PROGRAM" && INSTALL_PROGRAM='${INSTALL}'
-
-test -z "$INSTALL_SCRIPT" && INSTALL_SCRIPT='${INSTALL}'
-
-test -z "$INSTALL_DATA" && INSTALL_DATA='${INSTALL} -m 644'
-
-{ $as_echo "$as_me:${as_lineno-$LINENO}: checking whether build environment is sane" >&5
-$as_echo_n "checking whether build environment is sane... " >&6; }
-# Just in case
-sleep 1
-echo timestamp > conftest.file
-# Reject unsafe characters in $srcdir or the absolute working directory
-# name.  Accept space and tab only in the latter.
-am_lf='
-'
-case `pwd` in
-  *[\\\"\#\$\&\'\`$am_lf]*)
-    as_fn_error $? "unsafe absolute working directory name" "$LINENO" 5;;
-esac
-case $srcdir in
-  *[\\\"\#\$\&\'\`$am_lf\ \	]*)
-    as_fn_error $? "unsafe srcdir value: \`$srcdir'" "$LINENO" 5;;
-esac
-
-# Do `set' in a subshell so we don't clobber the current shell's
-# arguments.  Must try -L first in case configure is actually a
-# symlink; some systems play weird games with the mod time of symlinks
-# (eg FreeBSD returns the mod time of the symlink's containing
-# directory).
-if (
-   set X `ls -Lt "$srcdir/configure" conftest.file 2> /dev/null`
-   if test "$*" = "X"; then
-      # -L didn't work.
-      set X `ls -t "$srcdir/configure" conftest.file`
-   fi
-   rm -f conftest.file
-   if test "$*" != "X $srcdir/configure conftest.file" \
-      && test "$*" != "X conftest.file $srcdir/configure"; then
-
-      # If neither matched, then we have a broken ls.  This can happen
-      # if, for instance, CONFIG_SHELL is bash and it inherits a
-      # broken ls alias from the environment.  This has actually
-      # happened.  Such a system could not be considered "sane".
-      as_fn_error $? "ls -t appears to fail.  Make sure there is not a broken
-alias in your environment" "$LINENO" 5
-   fi
-
-   test "$2" = conftest.file
-   )
-then
-   # Ok.
-   :
-else
-   as_fn_error $? "newly created file is older than distributed files!
-Check your system clock" "$LINENO" 5
-fi
-{ $as_echo "$as_me:${as_lineno-$LINENO}: result: yes" >&5
-$as_echo "yes" >&6; }
-test "$program_prefix" != NONE &&
-  program_transform_name="s&^&$program_prefix&;$program_transform_name"
-# Use a double $ so make ignores it.
-test "$program_suffix" != NONE &&
-  program_transform_name="s&\$&$program_suffix&;$program_transform_name"
-# Double any \ or $.
-# By default was `s,x,x', remove it if useless.
-ac_script='s/[\\$]/&&/g;s/;s,x,x,$//'
-program_transform_name=`$as_echo "$program_transform_name" | sed "$ac_script"`
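The transform assembled here is an ordinary sed program applied to binary
names at install time. Roughly, configuring with --program-prefix=g and
--program-suffix=-2 yields (illustrative values, shown after the double-$
quoting for make has been undone):

    program_transform_name='s&^&g&;s&$&-2&'
    echo udunits2 | sed "$program_transform_name"    # prints gudunits2-2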
-
-# expand $ac_aux_dir to an absolute path
-am_aux_dir=`cd $ac_aux_dir && pwd`
-
-if test x"${MISSING+set}" != xset; then
-  case $am_aux_dir in
-  *\ * | *\	*)
-    MISSING="\${SHELL} \"$am_aux_dir/missing\"" ;;
-  *)
-    MISSING="\${SHELL} $am_aux_dir/missing" ;;
-  esac
-fi
-# Use eval to expand $SHELL
-if eval "$MISSING --run true"; then
-  am_missing_run="$MISSING --run "
-else
-  am_missing_run=
-  { $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: \`missing' script is too old or missing" >&5
-$as_echo "$as_me: WARNING: \`missing' script is too old or missing" >&2;}
-fi
-
-if test x"${install_sh}" != xset; then
-  case $am_aux_dir in
-  *\ * | *\	*)
-    install_sh="\${SHELL} '$am_aux_dir/install-sh'" ;;
-  *)
-    install_sh="\${SHELL} $am_aux_dir/install-sh"
-  esac
-fi
-
-# Installed binaries are usually stripped using `strip' when the user
-# runs `make install-strip'.  However `strip' might not be the right
-# tool to use in cross-compilation environments, therefore Automake
-# will honor the `STRIP' environment variable to overrule this program.
-if test "$cross_compiling" != no; then
-  if test -n "$ac_tool_prefix"; then
-  # Extract the first word of "${ac_tool_prefix}strip", so it can be a program name with args.
-set dummy ${ac_tool_prefix}strip; ac_word=$2
-{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5
-$as_echo_n "checking for $ac_word... " >&6; }
-if ${ac_cv_prog_STRIP+:} false; then :
-  $as_echo_n "(cached) " >&6
-else
-  if test -n "$STRIP"; then
-  ac_cv_prog_STRIP="$STRIP" # Let the user override the test.
-else
-as_save_IFS=$IFS; IFS=$PATH_SEPARATOR
-for as_dir in $PATH
-do
-  IFS=$as_save_IFS
-  test -z "$as_dir" && as_dir=.
-    for ac_exec_ext in '' $ac_executable_extensions; do
-  if { test -f "$as_dir/$ac_word$ac_exec_ext" && $as_test_x "$as_dir/$ac_word$ac_exec_ext"; }; then
-    ac_cv_prog_STRIP="${ac_tool_prefix}strip"
-    $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5
-    break 2
-  fi
-done
-  done
-IFS=$as_save_IFS
-
-fi
-fi
-STRIP=$ac_cv_prog_STRIP
-if test -n "$STRIP"; then
-  { $as_echo "$as_me:${as_lineno-$LINENO}: result: $STRIP" >&5
-$as_echo "$STRIP" >&6; }
-else
-  { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5
-$as_echo "no" >&6; }
-fi
-
-
-fi
-if test -z "$ac_cv_prog_STRIP"; then
-  ac_ct_STRIP=$STRIP
-  # Extract the first word of "strip", so it can be a program name with args.
-set dummy strip; ac_word=$2
-{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5
-$as_echo_n "checking for $ac_word... " >&6; }
-if ${ac_cv_prog_ac_ct_STRIP+:} false; then :
-  $as_echo_n "(cached) " >&6
-else
-  if test -n "$ac_ct_STRIP"; then
-  ac_cv_prog_ac_ct_STRIP="$ac_ct_STRIP" # Let the user override the test.
-else
-as_save_IFS=$IFS; IFS=$PATH_SEPARATOR
-for as_dir in $PATH
-do
-  IFS=$as_save_IFS
-  test -z "$as_dir" && as_dir=.
-    for ac_exec_ext in '' $ac_executable_extensions; do
-  if { test -f "$as_dir/$ac_word$ac_exec_ext" && $as_test_x "$as_dir/$ac_word$ac_exec_ext"; }; then
-    ac_cv_prog_ac_ct_STRIP="strip"
-    $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5
-    break 2
-  fi
-done
-  done
-IFS=$as_save_IFS
-
-fi
-fi
-ac_ct_STRIP=$ac_cv_prog_ac_ct_STRIP
-if test -n "$ac_ct_STRIP"; then
-  { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_ct_STRIP" >&5
-$as_echo "$ac_ct_STRIP" >&6; }
-else
-  { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5
-$as_echo "no" >&6; }
-fi
-
-  if test "x$ac_ct_STRIP" = x; then
-    STRIP=":"
-  else
-    case $cross_compiling:$ac_tool_warned in
-yes:)
-{ $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: using cross tools not prefixed with host triplet" >&5
-$as_echo "$as_me: WARNING: using cross tools not prefixed with host triplet" >&2;}
-ac_tool_warned=yes ;;
-esac
-    STRIP=$ac_ct_STRIP
-  fi
-else
-  STRIP="$ac_cv_prog_STRIP"
-fi
-
-fi
-INSTALL_STRIP_PROGRAM="\$(install_sh) -c -s"
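Because Automake honors $STRIP, cross builds usually pass the target-prefixed
tool explicitly rather than relying on this autodetection; an illustrative
invocation (host triplet and tool name are examples only):

    ./configure --host=arm-linux-gnueabihf STRIP=arm-linux-gnueabihf-strip
    make install-strip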
-
-{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for a thread-safe mkdir -p" >&5
-$as_echo_n "checking for a thread-safe mkdir -p... " >&6; }
-if test -z "$MKDIR_P"; then
-  if ${ac_cv_path_mkdir+:} false; then :
-  $as_echo_n "(cached) " >&6
-else
-  as_save_IFS=$IFS; IFS=$PATH_SEPARATOR
-for as_dir in $PATH$PATH_SEPARATOR/opt/sfw/bin
-do
-  IFS=$as_save_IFS
-  test -z "$as_dir" && as_dir=.
-    for ac_prog in mkdir gmkdir; do
-	 for ac_exec_ext in '' $ac_executable_extensions; do
-	   { test -f "$as_dir/$ac_prog$ac_exec_ext" && $as_test_x "$as_dir/$ac_prog$ac_exec_ext"; } || continue
-	   case `"$as_dir/$ac_prog$ac_exec_ext" --version 2>&1` in #(
-	     'mkdir (GNU coreutils) '* | \
-	     'mkdir (coreutils) '* | \
-	     'mkdir (fileutils) '4.1*)
-	       ac_cv_path_mkdir=$as_dir/$ac_prog$ac_exec_ext
-	       break 3;;
-	   esac
-	 done
-       done
-  done
-IFS=$as_save_IFS
-
-fi
-
-  test -d ./--version && rmdir ./--version
-  if test "${ac_cv_path_mkdir+set}" = set; then
-    MKDIR_P="$ac_cv_path_mkdir -p"
-  else
-    # As a last resort, use the slow shell script.  Don't cache a
-    # value for MKDIR_P within a source directory, because that will
-    # break other packages using the cache if that directory is
-    # removed, or if the value is a relative name.
-    MKDIR_P="$ac_install_sh -d"
-  fi
-fi
-{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $MKDIR_P" >&5
-$as_echo "$MKDIR_P" >&6; }
-
-mkdir_p="$MKDIR_P"
-case $mkdir_p in
-  [\\/$]* | ?:[\\/]*) ;;
-  */*) mkdir_p="\$(top_builddir)/$mkdir_p" ;;
-esac
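Only the GNU coreutils implementation of `mkdir -p` is trusted here as
race-free when parallel make jobs create the same directory tree, hence the
grep of --version output. The same decision in isolation (a sketch; the
install-sh fallback path is illustrative):

    case `mkdir --version 2>&1` in
      'mkdir (GNU coreutils) '* | 'mkdir (coreutils) '*)
        MKDIR_P='mkdir -p' ;;          # safe under concurrent makes
      *)
        MKDIR_P='./install-sh -d' ;;   # slow but safe shell fallback
    esac
    echo "MKDIR_P=$MKDIR_P"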
-
-for ac_prog in gawk mawk nawk awk
-do
-  # Extract the first word of "$ac_prog", so it can be a program name with args.
-set dummy $ac_prog; ac_word=$2
-{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5
-$as_echo_n "checking for $ac_word... " >&6; }
-if ${ac_cv_prog_AWK+:} false; then :
-  $as_echo_n "(cached) " >&6
-else
-  if test -n "$AWK"; then
-  ac_cv_prog_AWK="$AWK" # Let the user override the test.
-else
-as_save_IFS=$IFS; IFS=$PATH_SEPARATOR
-for as_dir in $PATH
-do
-  IFS=$as_save_IFS
-  test -z "$as_dir" && as_dir=.
-    for ac_exec_ext in '' $ac_executable_extensions; do
-  if { test -f "$as_dir/$ac_word$ac_exec_ext" && $as_test_x "$as_dir/$ac_word$ac_exec_ext"; }; then
-    ac_cv_prog_AWK="$ac_prog"
-    $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5
-    break 2
-  fi
-done
-  done
-IFS=$as_save_IFS
-
-fi
-fi
-AWK=$ac_cv_prog_AWK
-if test -n "$AWK"; then
-  { $as_echo "$as_me:${as_lineno-$LINENO}: result: $AWK" >&5
-$as_echo "$AWK" >&6; }
-else
-  { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5
-$as_echo "no" >&6; }
-fi
-
-
-  test -n "$AWK" && break
-done
-
-{ $as_echo "$as_me:${as_lineno-$LINENO}: checking whether ${MAKE-make} sets \$(MAKE)" >&5
-$as_echo_n "checking whether ${MAKE-make} sets \$(MAKE)... " >&6; }
-set x ${MAKE-make}
-ac_make=`$as_echo "$2" | sed 's/+/p/g; s/[^a-zA-Z0-9_]/_/g'`
-if eval \${ac_cv_prog_make_${ac_make}_set+:} false; then :
-  $as_echo_n "(cached) " >&6
-else
-  cat >conftest.make <<\_ACEOF
-SHELL = /bin/sh
-all:
-	@echo '@@@%%%=$(MAKE)=@@@%%%'
-_ACEOF
-# GNU make sometimes prints "make[1]: Entering ...", which would confuse us.
-case `${MAKE-make} -f conftest.make 2>/dev/null` in
-  *@@@%%%=?*=@@@%%%*)
-    eval ac_cv_prog_make_${ac_make}_set=yes;;
-  *)
-    eval ac_cv_prog_make_${ac_make}_set=no;;
-esac
-rm -f conftest.make
-fi
-if eval test \$ac_cv_prog_make_${ac_make}_set = yes; then
-  { $as_echo "$as_me:${as_lineno-$LINENO}: result: yes" >&5
-$as_echo "yes" >&6; }
-  SET_MAKE=
-else
-  { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5
-$as_echo "no" >&6; }
-  SET_MAKE="MAKE=${MAKE-make}"
-fi
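The $(MAKE) probe just checks whether a marker string containing $(MAKE)
round-trips through a one-rule makefile. The same idea stand-alone (assumes
some make on PATH):

    printf 'all:\n\t@echo @@@%%%%%%=$(MAKE)=@@@%%%%%%\n' > conftest.make
    case `make -s -f conftest.make 2>/dev/null` in
      *@@@%%%=?*=@@@%%%*) echo 'make sets $(MAKE); SET_MAKE stays empty' ;;
      *)                  echo 'need SET_MAKE="MAKE=make"' ;;
    esac
    rm -f conftest.make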
-
-rm -rf .tst 2>/dev/null
-mkdir .tst 2>/dev/null
-if test -d .tst; then
-  am__leading_dot=.
-else
-  am__leading_dot=_
-fi
-rmdir .tst 2>/dev/null
-
-if test "`cd $srcdir && pwd`" != "`pwd`"; then
-  # Use -I$(srcdir) only when $(srcdir) != ., so that make's output
-  # is not polluted with repeated "-I."
-  am__isrc=' -I$(srcdir)'
-  # test to see if srcdir already configured
-  if test -f $srcdir/config.status; then
-    as_fn_error $? "source directory already configured; run \"make distclean\" there first" "$LINENO" 5
-  fi
-fi
-
-# test whether we have cygpath
-if test -z "$CYGPATH_W"; then
-  if (cygpath --version) >/dev/null 2>/dev/null; then
-    CYGPATH_W='cygpath -w'
-  else
-    CYGPATH_W=echo
-  fi
-fi
-
-
-# Define the identity of the package.
- PACKAGE='udunits'
- VERSION='2.2.17'
-
-
-cat >>confdefs.h <<_ACEOF
-#define PACKAGE "$PACKAGE"
-_ACEOF
-
-
-cat >>confdefs.h <<_ACEOF
-#define VERSION "$VERSION"
-_ACEOF
-
-# Some tools Automake needs.
-
-ACLOCAL=${ACLOCAL-"${am_missing_run}aclocal-${am__api_version}"}
-
-
-AUTOCONF=${AUTOCONF-"${am_missing_run}autoconf"}
-
-
-AUTOMAKE=${AUTOMAKE-"${am_missing_run}automake-${am__api_version}"}
-
-
-AUTOHEADER=${AUTOHEADER-"${am_missing_run}autoheader"}
-
-
-MAKEINFO=${MAKEINFO-"${am_missing_run}makeinfo"}
-
-# We need awk for the "check" target.  The system "awk" is bad on
-# some platforms.
-# Always define AMTAR for backward compatibility.
-
-AMTAR=${AMTAR-"${am_missing_run}tar"}
-
-am__tar='${AMTAR} chof - "$$tardir"'; am__untar='${AMTAR} xf -'
-
-
-
-
-
-ac_config_headers="$ac_config_headers config.h"
-
-
-CFLAGS_COVERAGE=''
-LIBS_COVERAGE=''
-
-# Check whether --enable-coverage was given.
-if test "${enable_coverage+set}" = set; then :
-  enableval=$enable_coverage; case "${enableval}" in
-  yes) CFLAGS_COVERAGE='--coverage'
-       LIBS_COVERAGE=-lgcov
-       coverage_enabled=true;;
-  no) ;;
-  *) as_fn_error $? "bad value ${enableval} for --enable-coverage" "$LINENO" 5 ;;
-esac
-fi
-
-
-
-
-# Check whether --enable-debug was given.
-if test "${enable_debug+set}" = set; then :
-  enableval=$enable_debug; case "${enableval}" in
-  yes)
-    CFLAGS="-g${CFLAGS:+ $CFLAGS}"
-    debug=true ;;
-  no)
-    CFLAGS="-O${CFLAGS:+ $CFLAGS}"
-    debug=false ;;
-  *) as_fn_error $? "bad value ${enableval} for --enable-debug" "$LINENO" 5 ;;
-esac
-else
-  if test "$coverage_enabled" = true; then
-    CFLAGS="-g${CFLAGS:+ $CFLAGS}"
-    debug=true
-else
-    debug=false
-fi
-
-fi
-
- if test x$debug = xtrue; then
-  DEBUG_TRUE=
-  DEBUG_FALSE='#'
-else
-  DEBUG_TRUE='#'
-  DEBUG_FALSE=
-fi
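Taken together, the two switches map onto the build flags as follows
(illustrative invocations; the -g/-O default is prepended to any user
CFLAGS):

    ./configure --enable-coverage   # CFLAGS gains -g, CFLAGS_COVERAGE=--coverage,
                                    # LIBS_COVERAGE=-lgcov
    ./configure --enable-debug      # CFLAGS gains -g
    ./configure --disable-debug     # CFLAGS gains -O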
-
-
- if true; then
-  ENABLE_UDUNITS_1_TRUE=
-  ENABLE_UDUNITS_1_FALSE='#'
-else
-  ENABLE_UDUNITS_1_TRUE='#'
-  ENABLE_UDUNITS_1_FALSE=
-fi
-
-# Check whether --enable-udunits-1 was given.
-if test "${enable_udunits_1+set}" = set; then :
-  enableval=$enable_udunits_1; case "${enableval}" in
-      no)    if false; then
-  ENABLE_UDUNITS_1_TRUE=
-  ENABLE_UDUNITS_1_FALSE='#'
-else
-  ENABLE_UDUNITS_1_TRUE='#'
-  ENABLE_UDUNITS_1_FALSE=
-fi
- ;;
-      yes)  ;;
-      *)    as_fn_error $? "bad value ${enableval} for --enable-udunits-1" "$LINENO" 5 ;;
-    esac
-fi
-
-
-# Ensure that compilation is optimized and assertions are disabled by default.
-CFLAGS=${CFLAGS:--O}
-CPPFLAGS=${CPPFLAGS:--DNDEBUG}
-
-# The default absolute pathname of the installed units database. "pkgdatadir"
-# isn't a configure-variable in the normal sense: it doesn't appear in
-# "config.status" yet appears in "Makefile"; consequently, the following
-# nonsense just to avoid defining the pathname in the makefile so that Eclipse
-# is happy.
-pkgdatadir=$(eval echo $(eval echo `echo ${datadir}`/${PACKAGE}))
-
-cat >>confdefs.h <<_ACEOF
-#define DEFAULT_UDUNITS2_XML_PATH "${pkgdatadir}/udunits2.xml"
-_ACEOF
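The double eval is needed because, with stock Autoconf defaults, the value is
two levels of indirection deep. Tracing it by hand (a sketch with an assumed
prefix):

    prefix=/usr/local
    datarootdir='${prefix}/share'
    datadir='${datarootdir}'
    PACKAGE=udunits
    pkgdatadir=$(eval echo $(eval echo `echo ${datadir}`/${PACKAGE}))
    echo "$pkgdatadir"    # /usr/local/share/udunits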
-
-
-# Checks for programs.
-ac_ext=c
-ac_cpp='$CPP $CPPFLAGS'
-ac_compile='$CC -c $CFLAGS $CPPFLAGS conftest.$ac_ext >&5'
-ac_link='$CC -o conftest$ac_exeext $CFLAGS $CPPFLAGS $LDFLAGS conftest.$ac_ext $LIBS >&5'
-ac_compiler_gnu=$ac_cv_c_compiler_gnu
-if test -n "$ac_tool_prefix"; then
-  # Extract the first word of "${ac_tool_prefix}gcc", so it can be a program name with args.
-set dummy ${ac_tool_prefix}gcc; ac_word=$2
-{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5
-$as_echo_n "checking for $ac_word... " >&6; }
-if ${ac_cv_prog_CC+:} false; then :
-  $as_echo_n "(cached) " >&6
-else
-  if test -n "$CC"; then
-  ac_cv_prog_CC="$CC" # Let the user override the test.
-else
-as_save_IFS=$IFS; IFS=$PATH_SEPARATOR
-for as_dir in $PATH
-do
-  IFS=$as_save_IFS
-  test -z "$as_dir" && as_dir=.
-    for ac_exec_ext in '' $ac_executable_extensions; do
-  if { test -f "$as_dir/$ac_word$ac_exec_ext" && $as_test_x "$as_dir/$ac_word$ac_exec_ext"; }; then
-    ac_cv_prog_CC="${ac_tool_prefix}gcc"
-    $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5
-    break 2
-  fi
-done
-  done
-IFS=$as_save_IFS
-
-fi
-fi
-CC=$ac_cv_prog_CC
-if test -n "$CC"; then
-  { $as_echo "$as_me:${as_lineno-$LINENO}: result: $CC" >&5
-$as_echo "$CC" >&6; }
-else
-  { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5
-$as_echo "no" >&6; }
-fi
-
-
-fi
-if test -z "$ac_cv_prog_CC"; then
-  ac_ct_CC=$CC
-  # Extract the first word of "gcc", so it can be a program name with args.
-set dummy gcc; ac_word=$2
-{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5
-$as_echo_n "checking for $ac_word... " >&6; }
-if ${ac_cv_prog_ac_ct_CC+:} false; then :
-  $as_echo_n "(cached) " >&6
-else
-  if test -n "$ac_ct_CC"; then
-  ac_cv_prog_ac_ct_CC="$ac_ct_CC" # Let the user override the test.
-else
-as_save_IFS=$IFS; IFS=$PATH_SEPARATOR
-for as_dir in $PATH
-do
-  IFS=$as_save_IFS
-  test -z "$as_dir" && as_dir=.
-    for ac_exec_ext in '' $ac_executable_extensions; do
-  if { test -f "$as_dir/$ac_word$ac_exec_ext" && $as_test_x "$as_dir/$ac_word$ac_exec_ext"; }; then
-    ac_cv_prog_ac_ct_CC="gcc"
-    $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5
-    break 2
-  fi
-done
-  done
-IFS=$as_save_IFS
-
-fi
-fi
-ac_ct_CC=$ac_cv_prog_ac_ct_CC
-if test -n "$ac_ct_CC"; then
-  { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_ct_CC" >&5
-$as_echo "$ac_ct_CC" >&6; }
-else
-  { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5
-$as_echo "no" >&6; }
-fi
-
-  if test "x$ac_ct_CC" = x; then
-    CC=""
-  else
-    case $cross_compiling:$ac_tool_warned in
-yes:)
-{ $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: using cross tools not prefixed with host triplet" >&5
-$as_echo "$as_me: WARNING: using cross tools not prefixed with host triplet" >&2;}
-ac_tool_warned=yes ;;
-esac
-    CC=$ac_ct_CC
-  fi
-else
-  CC="$ac_cv_prog_CC"
-fi
-
-if test -z "$CC"; then
-          if test -n "$ac_tool_prefix"; then
-    # Extract the first word of "${ac_tool_prefix}cc", so it can be a program name with args.
-set dummy ${ac_tool_prefix}cc; ac_word=$2
-{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5
-$as_echo_n "checking for $ac_word... " >&6; }
-if ${ac_cv_prog_CC+:} false; then :
-  $as_echo_n "(cached) " >&6
-else
-  if test -n "$CC"; then
-  ac_cv_prog_CC="$CC" # Let the user override the test.
-else
-as_save_IFS=$IFS; IFS=$PATH_SEPARATOR
-for as_dir in $PATH
-do
-  IFS=$as_save_IFS
-  test -z "$as_dir" && as_dir=.
-    for ac_exec_ext in '' $ac_executable_extensions; do
-  if { test -f "$as_dir/$ac_word$ac_exec_ext" && $as_test_x "$as_dir/$ac_word$ac_exec_ext"; }; then
-    ac_cv_prog_CC="${ac_tool_prefix}cc"
-    $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5
-    break 2
-  fi
-done
-  done
-IFS=$as_save_IFS
-
-fi
-fi
-CC=$ac_cv_prog_CC
-if test -n "$CC"; then
-  { $as_echo "$as_me:${as_lineno-$LINENO}: result: $CC" >&5
-$as_echo "$CC" >&6; }
-else
-  { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5
-$as_echo "no" >&6; }
-fi
-
-
-  fi
-fi
-if test -z "$CC"; then
-  # Extract the first word of "cc", so it can be a program name with args.
-set dummy cc; ac_word=$2
-{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5
-$as_echo_n "checking for $ac_word... " >&6; }
-if ${ac_cv_prog_CC+:} false; then :
-  $as_echo_n "(cached) " >&6
-else
-  if test -n "$CC"; then
-  ac_cv_prog_CC="$CC" # Let the user override the test.
-else
-  ac_prog_rejected=no
-as_save_IFS=$IFS; IFS=$PATH_SEPARATOR
-for as_dir in $PATH
-do
-  IFS=$as_save_IFS
-  test -z "$as_dir" && as_dir=.
-    for ac_exec_ext in '' $ac_executable_extensions; do
-  if { test -f "$as_dir/$ac_word$ac_exec_ext" && $as_test_x "$as_dir/$ac_word$ac_exec_ext"; }; then
-    if test "$as_dir/$ac_word$ac_exec_ext" = "/usr/ucb/cc"; then
-       ac_prog_rejected=yes
-       continue
-     fi
-    ac_cv_prog_CC="cc"
-    $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5
-    break 2
-  fi
-done
-  done
-IFS=$as_save_IFS
-
-if test $ac_prog_rejected = yes; then
-  # We found a bogon in the path, so make sure we never use it.
-  set dummy $ac_cv_prog_CC
-  shift
-  if test $# != 0; then
-    # We chose a different compiler from the bogus one.
-    # However, it has the same basename, so the bogon will be chosen
-    # first if we set CC to just the basename; use the full file name.
-    shift
-    ac_cv_prog_CC="$as_dir/$ac_word${1+' '}$@"
-  fi
-fi
-fi
-fi
-CC=$ac_cv_prog_CC
-if test -n "$CC"; then
-  { $as_echo "$as_me:${as_lineno-$LINENO}: result: $CC" >&5
-$as_echo "$CC" >&6; }
-else
-  { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5
-$as_echo "no" >&6; }
-fi
-
-
-fi
-if test -z "$CC"; then
-  if test -n "$ac_tool_prefix"; then
-  for ac_prog in cl.exe
-  do
-    # Extract the first word of "$ac_tool_prefix$ac_prog", so it can be a program name with args.
-set dummy $ac_tool_prefix$ac_prog; ac_word=$2
-{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5
-$as_echo_n "checking for $ac_word... " >&6; }
-if ${ac_cv_prog_CC+:} false; then :
-  $as_echo_n "(cached) " >&6
-else
-  if test -n "$CC"; then
-  ac_cv_prog_CC="$CC" # Let the user override the test.
-else
-as_save_IFS=$IFS; IFS=$PATH_SEPARATOR
-for as_dir in $PATH
-do
-  IFS=$as_save_IFS
-  test -z "$as_dir" && as_dir=.
-    for ac_exec_ext in '' $ac_executable_extensions; do
-  if { test -f "$as_dir/$ac_word$ac_exec_ext" && $as_test_x "$as_dir/$ac_word$ac_exec_ext"; }; then
-    ac_cv_prog_CC="$ac_tool_prefix$ac_prog"
-    $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5
-    break 2
-  fi
-done
-  done
-IFS=$as_save_IFS
-
-fi
-fi
-CC=$ac_cv_prog_CC
-if test -n "$CC"; then
-  { $as_echo "$as_me:${as_lineno-$LINENO}: result: $CC" >&5
-$as_echo "$CC" >&6; }
-else
-  { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5
-$as_echo "no" >&6; }
-fi
-
-
-    test -n "$CC" && break
-  done
-fi
-if test -z "$CC"; then
-  ac_ct_CC=$CC
-  for ac_prog in cl.exe
-do
-  # Extract the first word of "$ac_prog", so it can be a program name with args.
-set dummy $ac_prog; ac_word=$2
-{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5
-$as_echo_n "checking for $ac_word... " >&6; }
-if ${ac_cv_prog_ac_ct_CC+:} false; then :
-  $as_echo_n "(cached) " >&6
-else
-  if test -n "$ac_ct_CC"; then
-  ac_cv_prog_ac_ct_CC="$ac_ct_CC" # Let the user override the test.
-else
-as_save_IFS=$IFS; IFS=$PATH_SEPARATOR
-for as_dir in $PATH
-do
-  IFS=$as_save_IFS
-  test -z "$as_dir" && as_dir=.
-    for ac_exec_ext in '' $ac_executable_extensions; do
-  if { test -f "$as_dir/$ac_word$ac_exec_ext" && $as_test_x "$as_dir/$ac_word$ac_exec_ext"; }; then
-    ac_cv_prog_ac_ct_CC="$ac_prog"
-    $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5
-    break 2
-  fi
-done
-  done
-IFS=$as_save_IFS
-
-fi
-fi
-ac_ct_CC=$ac_cv_prog_ac_ct_CC
-if test -n "$ac_ct_CC"; then
-  { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_ct_CC" >&5
-$as_echo "$ac_ct_CC" >&6; }
-else
-  { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5
-$as_echo "no" >&6; }
-fi
-
-
-  test -n "$ac_ct_CC" && break
-done
-
-  if test "x$ac_ct_CC" = x; then
-    CC=""
-  else
-    case $cross_compiling:$ac_tool_warned in
-yes:)
-{ $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: using cross tools not prefixed with host triplet" >&5
-$as_echo "$as_me: WARNING: using cross tools not prefixed with host triplet" >&2;}
-ac_tool_warned=yes ;;
-esac
-    CC=$ac_ct_CC
-  fi
-fi
-
-fi
-
-
-test -z "$CC" && { { $as_echo "$as_me:${as_lineno-$LINENO}: error: in \`$ac_pwd':" >&5
-$as_echo "$as_me: error: in \`$ac_pwd':" >&2;}
-as_fn_error $? "no acceptable C compiler found in \$PATH
-See \`config.log' for more details" "$LINENO" 5; }
-
-# Provide some information about the compiler.
-$as_echo "$as_me:${as_lineno-$LINENO}: checking for C compiler version" >&5
-set X $ac_compile
-ac_compiler=$2
-for ac_option in --version -v -V -qversion; do
-  { { ac_try="$ac_compiler $ac_option >&5"
-case "(($ac_try" in
-  *\"* | *\`* | *\\*) ac_try_echo=\$ac_try;;
-  *) ac_try_echo=$ac_try;;
-esac
-eval ac_try_echo="\"\$as_me:${as_lineno-$LINENO}: $ac_try_echo\""
-$as_echo "$ac_try_echo"; } >&5
-  (eval "$ac_compiler $ac_option >&5") 2>conftest.err
-  ac_status=$?
-  if test -s conftest.err; then
-    sed '10a\
-... rest of stderr output deleted ...
-         10q' conftest.err >conftest.er1
-    cat conftest.er1 >&5
-  fi
-  rm -f conftest.er1 conftest.err
-  $as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5
-  test $ac_status = 0; }
-done
-
-cat confdefs.h - <<_ACEOF >conftest.$ac_ext
-/* end confdefs.h.  */
-
-int
-main ()
-{
-
-  ;
-  return 0;
-}
-_ACEOF
-ac_clean_files_save=$ac_clean_files
-ac_clean_files="$ac_clean_files a.out a.out.dSYM a.exe b.out"
-# Try to create an executable without -o first, disregard a.out.
-# It will help us diagnose broken compilers and give us a first guess
-# at exeext.
-{ $as_echo "$as_me:${as_lineno-$LINENO}: checking whether the C compiler works" >&5
-$as_echo_n "checking whether the C compiler works... " >&6; }
-ac_link_default=`$as_echo "$ac_link" | sed 's/ -o *conftest[^ ]*//'`
-
-# The possible output files:
-ac_files="a.out conftest.exe conftest a.exe a_out.exe b.out conftest.*"
-
-ac_rmfiles=
-for ac_file in $ac_files
-do
-  case $ac_file in
-    *.$ac_ext | *.xcoff | *.tds | *.d | *.pdb | *.xSYM | *.bb | *.bbg | *.map | *.inf | *.dSYM | *.o | *.obj ) ;;
-    * ) ac_rmfiles="$ac_rmfiles $ac_file";;
-  esac
-done
-rm -f $ac_rmfiles
-
-if { { ac_try="$ac_link_default"
-case "(($ac_try" in
-  *\"* | *\`* | *\\*) ac_try_echo=\$ac_try;;
-  *) ac_try_echo=$ac_try;;
-esac
-eval ac_try_echo="\"\$as_me:${as_lineno-$LINENO}: $ac_try_echo\""
-$as_echo "$ac_try_echo"; } >&5
-  (eval "$ac_link_default") 2>&5
-  ac_status=$?
-  $as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5
-  test $ac_status = 0; }; then :
-  # Autoconf-2.13 could set the ac_cv_exeext variable to `no'.
-# So ignore a value of `no', otherwise this would lead to `EXEEXT = no'
-# in a Makefile.  We should not override ac_cv_exeext if it was cached,
-# so that the user can short-circuit this test for compilers unknown to
-# Autoconf.
-for ac_file in $ac_files ''
-do
-  test -f "$ac_file" || continue
-  case $ac_file in
-    *.$ac_ext | *.xcoff | *.tds | *.d | *.pdb | *.xSYM | *.bb | *.bbg | *.map | *.inf | *.dSYM | *.o | *.obj )
-	;;
-    [ab].out )
-	# We found the default executable, but exeext='' is most
-	# certainly right.
-	break;;
-    *.* )
-	if test "${ac_cv_exeext+set}" = set && test "$ac_cv_exeext" != no;
-	then :; else
-	   ac_cv_exeext=`expr "$ac_file" : '[^.]*\(\..*\)'`
-	fi
-	# We set ac_cv_exeext here because the later test for it is not
-	# safe: cross compilers may not add the suffix if given an `-o'
-	# argument, so we may need to know it at that point already.
-	# Even if this section looks crufty: it has the advantage of
-	# actually working.
-	break;;
-    * )
-	break;;
-  esac
-done
-test "$ac_cv_exeext" = no && ac_cv_exeext=
-
-else
-  ac_file=''
-fi
-if test -z "$ac_file"; then :
-  { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5
-$as_echo "no" >&6; }
-$as_echo "$as_me: failed program was:" >&5
-sed 's/^/| /' conftest.$ac_ext >&5
-
-{ { $as_echo "$as_me:${as_lineno-$LINENO}: error: in \`$ac_pwd':" >&5
-$as_echo "$as_me: error: in \`$ac_pwd':" >&2;}
-as_fn_error 77 "C compiler cannot create executables
-See \`config.log' for more details" "$LINENO" 5; }
-else
-  { $as_echo "$as_me:${as_lineno-$LINENO}: result: yes" >&5
-$as_echo "yes" >&6; }
-fi
-{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for C compiler default output file name" >&5
-$as_echo_n "checking for C compiler default output file name... " >&6; }
-{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_file" >&5
-$as_echo "$ac_file" >&6; }
-ac_exeext=$ac_cv_exeext
-
-rm -f -r a.out a.out.dSYM a.exe conftest$ac_cv_exeext b.out
-ac_clean_files=$ac_clean_files_save
-{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for suffix of executables" >&5
-$as_echo_n "checking for suffix of executables... " >&6; }
-if { { ac_try="$ac_link"
-case "(($ac_try" in
-  *\"* | *\`* | *\\*) ac_try_echo=\$ac_try;;
-  *) ac_try_echo=$ac_try;;
-esac
-eval ac_try_echo="\"\$as_me:${as_lineno-$LINENO}: $ac_try_echo\""
-$as_echo "$ac_try_echo"; } >&5
-  (eval "$ac_link") 2>&5
-  ac_status=$?
-  $as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5
-  test $ac_status = 0; }; then :
-  # If both `conftest.exe' and `conftest' are `present' (well, observable)
-# catch `conftest.exe'.  For instance with Cygwin, `ls conftest' will
-# work properly (i.e., refer to `conftest.exe'), while it won't with
-# `rm'.
-for ac_file in conftest.exe conftest conftest.*; do
-  test -f "$ac_file" || continue
-  case $ac_file in
-    *.$ac_ext | *.xcoff | *.tds | *.d | *.pdb | *.xSYM | *.bb | *.bbg | *.map | *.inf | *.dSYM | *.o | *.obj ) ;;
-    *.* ) ac_cv_exeext=`expr "$ac_file" : '[^.]*\(\..*\)'`
-	  break;;
-    * ) break;;
-  esac
-done
-else
-  { { $as_echo "$as_me:${as_lineno-$LINENO}: error: in \`$ac_pwd':" >&5
-$as_echo "$as_me: error: in \`$ac_pwd':" >&2;}
-as_fn_error $? "cannot compute suffix of executables: cannot compile and link
-See \`config.log' for more details" "$LINENO" 5; }
-fi
-rm -f conftest conftest$ac_cv_exeext
-{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_cv_exeext" >&5
-$as_echo "$ac_cv_exeext" >&6; }
-
-rm -f conftest.$ac_ext
-EXEEXT=$ac_cv_exeext
-ac_exeext=$EXEEXT
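Both executable checks rely on the same trick: link a trivial program and
infer the suffix from whatever file the compiler leaves behind. A compressed
sketch (assumes a working cc; a.out/b.out are special-cased as suffix-less):

    printf 'int main(void){return 0;}\n' > conftest.c
    rm -f a.out b.out a.exe conftest.exe
    cc conftest.c 2>/dev/null
    for f in a.out b.out a.exe conftest.exe; do
      test -f "$f" || continue
      case $f in
        a.out | b.out) EXEEXT= ;;            # classic Unix default output
        *.*)           EXEEXT=".${f##*.}" ;; # e.g. .exe on Cygwin/MinGW
      esac
      break
    done
    echo "EXEEXT='$EXEEXT'"
    rm -f a.out b.out a.exe conftest.exe conftest.c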
-cat confdefs.h - <<_ACEOF >conftest.$ac_ext
-/* end confdefs.h.  */
-#include <stdio.h>
-int
-main ()
-{
-FILE *f = fopen ("conftest.out", "w");
- return ferror (f) || fclose (f) != 0;
-
-  ;
-  return 0;
-}
-_ACEOF
-ac_clean_files="$ac_clean_files conftest.out"
-# Check that the compiler produces executables we can run.  If not, either
-# the compiler is broken, or we cross compile.
-{ $as_echo "$as_me:${as_lineno-$LINENO}: checking whether we are cross compiling" >&5
-$as_echo_n "checking whether we are cross compiling... " >&6; }
-if test "$cross_compiling" != yes; then
-  { { ac_try="$ac_link"
-case "(($ac_try" in
-  *\"* | *\`* | *\\*) ac_try_echo=\$ac_try;;
-  *) ac_try_echo=$ac_try;;
-esac
-eval ac_try_echo="\"\$as_me:${as_lineno-$LINENO}: $ac_try_echo\""
-$as_echo "$ac_try_echo"; } >&5
-  (eval "$ac_link") 2>&5
-  ac_status=$?
-  $as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5
-  test $ac_status = 0; }
-  if { ac_try='./conftest$ac_cv_exeext'
-  { { case "(($ac_try" in
-  *\"* | *\`* | *\\*) ac_try_echo=\$ac_try;;
-  *) ac_try_echo=$ac_try;;
-esac
-eval ac_try_echo="\"\$as_me:${as_lineno-$LINENO}: $ac_try_echo\""
-$as_echo "$ac_try_echo"; } >&5
-  (eval "$ac_try") 2>&5
-  ac_status=$?
-  $as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5
-  test $ac_status = 0; }; }; then
-    cross_compiling=no
-  else
-    if test "$cross_compiling" = maybe; then
-	cross_compiling=yes
-    else
-	{ { $as_echo "$as_me:${as_lineno-$LINENO}: error: in \`$ac_pwd':" >&5
-$as_echo "$as_me: error: in \`$ac_pwd':" >&2;}
-as_fn_error $? "cannot run C compiled programs.
-If you meant to cross compile, use \`--host'.
-See \`config.log' for more details" "$LINENO" 5; }
-    fi
-  fi
-fi
-{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $cross_compiling" >&5
-$as_echo "$cross_compiling" >&6; }
-
-rm -f conftest.$ac_ext conftest$ac_cv_exeext conftest.out
-ac_clean_files=$ac_clean_files_save
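The cross-compile determination is behavioral rather than declarative: if a
just-linked binary actually runs, the build is native. In miniature (a
sketch; assumes cc):

    printf 'int main(void){return 0;}\n' > conftest.c
    if cc -o conftest conftest.c 2>/dev/null && ./conftest 2>/dev/null; then
      echo cross_compiling=no
    else
      echo "binary did not run: broken compiler, or pass --host to cross compile"
    fi
    rm -f conftest conftest.c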
-{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for suffix of object files" >&5
-$as_echo_n "checking for suffix of object files... " >&6; }
-if ${ac_cv_objext+:} false; then :
-  $as_echo_n "(cached) " >&6
-else
-  cat confdefs.h - <<_ACEOF >conftest.$ac_ext
-/* end confdefs.h.  */
-
-int
-main ()
-{
-
-  ;
-  return 0;
-}
-_ACEOF
-rm -f conftest.o conftest.obj
-if { { ac_try="$ac_compile"
-case "(($ac_try" in
-  *\"* | *\`* | *\\*) ac_try_echo=\$ac_try;;
-  *) ac_try_echo=$ac_try;;
-esac
-eval ac_try_echo="\"\$as_me:${as_lineno-$LINENO}: $ac_try_echo\""
-$as_echo "$ac_try_echo"; } >&5
-  (eval "$ac_compile") 2>&5
-  ac_status=$?
-  $as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5
-  test $ac_status = 0; }; then :
-  for ac_file in conftest.o conftest.obj conftest.*; do
-  test -f "$ac_file" || continue;
-  case $ac_file in
-    *.$ac_ext | *.xcoff | *.tds | *.d | *.pdb | *.xSYM | *.bb | *.bbg | *.map | *.inf | *.dSYM ) ;;
-    *) ac_cv_objext=`expr "$ac_file" : '.*\.\(.*\)'`
-       break;;
-  esac
-done
-else
-  $as_echo "$as_me: failed program was:" >&5
-sed 's/^/| /' conftest.$ac_ext >&5
-
-{ { $as_echo "$as_me:${as_lineno-$LINENO}: error: in \`$ac_pwd':" >&5
-$as_echo "$as_me: error: in \`$ac_pwd':" >&2;}
-as_fn_error $? "cannot compute suffix of object files: cannot compile
-See \`config.log' for more details" "$LINENO" 5; }
-fi
-rm -f conftest.$ac_cv_objext conftest.$ac_ext
-fi
-{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_cv_objext" >&5
-$as_echo "$ac_cv_objext" >&6; }
-OBJEXT=$ac_cv_objext
-ac_objext=$OBJEXT
-{ $as_echo "$as_me:${as_lineno-$LINENO}: checking whether we are using the GNU C compiler" >&5
-$as_echo_n "checking whether we are using the GNU C compiler... " >&6; }
-if ${ac_cv_c_compiler_gnu+:} false; then :
-  $as_echo_n "(cached) " >&6
-else
-  cat confdefs.h - <<_ACEOF >conftest.$ac_ext
-/* end confdefs.h.  */
-
-int
-main ()
-{
-#ifndef __GNUC__
-       choke me
-#endif
-
-  ;
-  return 0;
-}
-_ACEOF
-if ac_fn_c_try_compile "$LINENO"; then :
-  ac_compiler_gnu=yes
-else
-  ac_compiler_gnu=no
-fi
-rm -f core conftest.err conftest.$ac_objext conftest.$ac_ext
-ac_cv_c_compiler_gnu=$ac_compiler_gnu
-
-fi
-{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_cv_c_compiler_gnu" >&5
-$as_echo "$ac_cv_c_compiler_gnu" >&6; }
-if test $ac_compiler_gnu = yes; then
-  GCC=yes
-else
-  GCC=
-fi
-ac_test_CFLAGS=${CFLAGS+set}
-ac_save_CFLAGS=$CFLAGS
-{ $as_echo "$as_me:${as_lineno-$LINENO}: checking whether $CC accepts -g" >&5
-$as_echo_n "checking whether $CC accepts -g... " >&6; }
-if ${ac_cv_prog_cc_g+:} false; then :
-  $as_echo_n "(cached) " >&6
-else
-  ac_save_c_werror_flag=$ac_c_werror_flag
-   ac_c_werror_flag=yes
-   ac_cv_prog_cc_g=no
-   CFLAGS="-g"
-   cat confdefs.h - <<_ACEOF >conftest.$ac_ext
-/* end confdefs.h.  */
-
-int
-main ()
-{
-
-  ;
-  return 0;
-}
-_ACEOF
-if ac_fn_c_try_compile "$LINENO"; then :
-  ac_cv_prog_cc_g=yes
-else
-  CFLAGS=""
-      cat confdefs.h - <<_ACEOF >conftest.$ac_ext
-/* end confdefs.h.  */
-
-int
-main ()
-{
-
-  ;
-  return 0;
-}
-_ACEOF
-if ac_fn_c_try_compile "$LINENO"; then :
-
-else
-  ac_c_werror_flag=$ac_save_c_werror_flag
-	 CFLAGS="-g"
-	 cat confdefs.h - <<_ACEOF >conftest.$ac_ext
-/* end confdefs.h.  */
-
-int
-main ()
-{
-
-  ;
-  return 0;
-}
-_ACEOF
-if ac_fn_c_try_compile "$LINENO"; then :
-  ac_cv_prog_cc_g=yes
-fi
-rm -f core conftest.err conftest.$ac_objext conftest.$ac_ext
-fi
-rm -f core conftest.err conftest.$ac_objext conftest.$ac_ext
-fi
-rm -f core conftest.err conftest.$ac_objext conftest.$ac_ext
-   ac_c_werror_flag=$ac_save_c_werror_flag
-fi
-{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_cv_prog_cc_g" >&5
-$as_echo "$ac_cv_prog_cc_g" >&6; }
-if test "$ac_test_CFLAGS" = set; then
-  CFLAGS=$ac_save_CFLAGS
-elif test $ac_cv_prog_cc_g = yes; then
-  if test "$GCC" = yes; then
-    CFLAGS="-g -O2"
-  else
-    CFLAGS="-g"
-  fi
-else
-  if test "$GCC" = yes; then
-    CFLAGS="-O2"
-  else
-    CFLAGS=
-  fi
-fi
-{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $CC option to accept ISO C89" >&5
-$as_echo_n "checking for $CC option to accept ISO C89... " >&6; }
-if ${ac_cv_prog_cc_c89+:} false; then :
-  $as_echo_n "(cached) " >&6
-else
-  ac_cv_prog_cc_c89=no
-ac_save_CC=$CC
-cat confdefs.h - <<_ACEOF >conftest.$ac_ext
-/* end confdefs.h.  */
-#include <stdarg.h>
-#include <stdio.h>
-#include <sys/types.h>
-#include <sys/stat.h>
-/* Most of the following tests are stolen from RCS 5.7's src/conf.sh.  */
-struct buf { int x; };
-FILE * (*rcsopen) (struct buf *, struct stat *, int);
-static char *e (p, i)
-     char **p;
-     int i;
-{
-  return p[i];
-}
-static char *f (char * (*g) (char **, int), char **p, ...)
-{
-  char *s;
-  va_list v;
-  va_start (v,p);
-  s = g (p, va_arg (v,int));
-  va_end (v);
-  return s;
-}
-
-/* OSF 4.0 Compaq cc is some sort of almost-ANSI by default.  It has
-   function prototypes and stuff, but not '\xHH' hex character constants.
-   These don't provoke an error, unfortunately; instead they are silently
-   treated as 'x'.  The following induces an error, until -std is added to get
-   proper ANSI mode.  Curiously '\x00'!='x' always comes out true, for an
-   array size at least.  It's necessary to write '\x00'==0 to get something
-   that's true only with -std.  */
-int osf4_cc_array ['\x00' == 0 ? 1 : -1];
-
-/* IBM C 6 for AIX is almost-ANSI by default, but it replaces macro parameters
-   inside strings and character constants.  */
-#define FOO(x) 'x'
-int xlc6_cc_array[FOO(a) == 'x' ? 1 : -1];
-
-int test (int i, double x);
-struct s1 {int (*f) (int a);};
-struct s2 {int (*f) (double a);};
-int pairnames (int, char **, FILE *(*)(struct buf *, struct stat *, int), int, int);
-int argc;
-char **argv;
-int
-main ()
-{
-return f (e, argv, 0) != argv[0]  ||  f (e, argv, 1) != argv[1];
-  ;
-  return 0;
-}
-_ACEOF
-for ac_arg in '' -qlanglvl=extc89 -qlanglvl=ansi -std \
-	-Ae "-Aa -D_HPUX_SOURCE" "-Xc -D__EXTENSIONS__"
-do
-  CC="$ac_save_CC $ac_arg"
-  if ac_fn_c_try_compile "$LINENO"; then :
-  ac_cv_prog_cc_c89=$ac_arg
-fi
-rm -f core conftest.err conftest.$ac_objext
-  test "x$ac_cv_prog_cc_c89" != "xno" && break
-done
-rm -f conftest.$ac_ext
-CC=$ac_save_CC
-
-fi
-# AC_CACHE_VAL
-case "x$ac_cv_prog_cc_c89" in
-  x)
-    { $as_echo "$as_me:${as_lineno-$LINENO}: result: none needed" >&5
-$as_echo "none needed" >&6; } ;;
-  xno)
-    { $as_echo "$as_me:${as_lineno-$LINENO}: result: unsupported" >&5
-$as_echo "unsupported" >&6; } ;;
-  *)
-    CC="$CC $ac_cv_prog_cc_c89"
-    { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_cv_prog_cc_c89" >&5
-$as_echo "$ac_cv_prog_cc_c89" >&6; } ;;
-esac
-if test "x$ac_cv_prog_cc_c89" != xno; then :
-
-fi
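The C89 probe retries one test program with vendor-specific ANSI flags until
something compiles. Schematically (flag list abridged; assumes cc and a
conftest.c that exercises C89 prototypes like the one above):

    for flag in '' -qlanglvl=extc89 -qlanglvl=ansi -std -Ae; do
      if cc $flag -c conftest.c 2>/dev/null; then
        echo "C89 option: '${flag:-none needed}'"
        test -n "$flag" && CC="cc $flag"
        break
      fi
    done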
-
-ac_ext=c
-ac_cpp='$CPP $CPPFLAGS'
-ac_compile='$CC -c $CFLAGS $CPPFLAGS conftest.$ac_ext >&5'
-ac_link='$CC -o conftest$ac_exeext $CFLAGS $CPPFLAGS $LDFLAGS conftest.$ac_ext $LIBS >&5'
-ac_compiler_gnu=$ac_cv_c_compiler_gnu
-DEPDIR="${am__leading_dot}deps"
-
-ac_config_commands="$ac_config_commands depfiles"
-
-
-am_make=${MAKE-make}
-cat > confinc << 'END'
-am__doit:
-	@echo this is the am__doit target
-.PHONY: am__doit
-END
-# If we don't find an include directive, just comment out the code.
-{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for style of include used by $am_make" >&5
-$as_echo_n "checking for style of include used by $am_make... " >&6; }
-am__include="#"
-am__quote=
-_am_result=none
-# First try GNU make style include.
-echo "include confinc" > confmf
-# Ignore all kinds of additional output from `make'.
-case `$am_make -s -f confmf 2> /dev/null` in #(
-*the\ am__doit\ target*)
-  am__include=include
-  am__quote=
-  _am_result=GNU
-  ;;
-esac
-# Now try BSD make style include.
-if test "$am__include" = "#"; then
-   echo '.include "confinc"' > confmf
-   case `$am_make -s -f confmf 2> /dev/null` in #(
-   *the\ am__doit\ target*)
-     am__include=.include
-     am__quote="\""
-     _am_result=BSD
-     ;;
-   esac
-fi
-
-
-{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $_am_result" >&5
-$as_echo "$_am_result" >&6; }
-rm -f confinc confmf
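GNU and BSD make disagree on include syntax, so the probe tries both
spellings against a makefile whose only job is to print a marker. The same
test stand-alone (assumes make on PATH):

    printf 'am__doit:\n\t@echo this is the am__doit target\n' > confinc
    echo 'include confinc' > confmf
    case `make -s -f confmf 2>/dev/null` in
      *the\ am__doit\ target*) echo 'GNU style:  include confinc' ;;
      *)
        echo '.include "confinc"' > confmf
        case `make -s -f confmf 2>/dev/null` in
          *the\ am__doit\ target*) echo 'BSD style:  .include "confinc"' ;;
          *) echo 'neither works; dependency includes get commented out' ;;
        esac ;;
    esac
    rm -f confinc confmf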
-
-# Check whether --enable-dependency-tracking was given.
-if test "${enable_dependency_tracking+set}" = set; then :
-  enableval=$enable_dependency_tracking;
-fi
-
-if test "x$enable_dependency_tracking" != xno; then
-  am_depcomp="$ac_aux_dir/depcomp"
-  AMDEPBACKSLASH='\'
-fi
- if test "x$enable_dependency_tracking" != xno; then
-  AMDEP_TRUE=
-  AMDEP_FALSE='#'
-else
-  AMDEP_TRUE='#'
-  AMDEP_FALSE=
-fi
-
-
-
-depcc="$CC"   am_compiler_list=
-
-{ $as_echo "$as_me:${as_lineno-$LINENO}: checking dependency style of $depcc" >&5
-$as_echo_n "checking dependency style of $depcc... " >&6; }
-if ${am_cv_CC_dependencies_compiler_type+:} false; then :
-  $as_echo_n "(cached) " >&6
-else
-  if test -z "$AMDEP_TRUE" && test -f "$am_depcomp"; then
-  # We make a subdir and do the tests there.  Otherwise we can end up
-  # making bogus files that we don't know about and never remove.  For
-  # instance it was reported that on HP-UX the gcc test will end up
-  # making a dummy file named `D' -- because `-MD' means `put the output
-  # in D'.
-  mkdir conftest.dir
-  # Copy depcomp to subdir because otherwise we won't find it if we're
-  # using a relative directory.
-  cp "$am_depcomp" conftest.dir
-  cd conftest.dir
-  # We will build objects and dependencies in a subdirectory because
-  # it helps to detect inapplicable dependency modes.  For instance
-  # both Tru64's cc and ICC support -MD to output dependencies as a
-  # side effect of compilation, but ICC will put the dependencies in
-  # the current directory while Tru64 will put them in the object
-  # directory.
-  mkdir sub
-
-  am_cv_CC_dependencies_compiler_type=none
-  if test "$am_compiler_list" = ""; then
-     am_compiler_list=`sed -n 's/^#*\([a-zA-Z0-9]*\))$/\1/p' < ./depcomp`
-  fi
-  am__universal=false
-  case " $depcc " in #(
-     *\ -arch\ *\ -arch\ *) am__universal=true ;;
-     esac
-
-  for depmode in $am_compiler_list; do
-    # Setup a source with many dependencies, because some compilers
-    # like to wrap large dependency lists on column 80 (with \), and
-    # we should not choose a depcomp mode which is confused by this.
-    #
-    # We need to recreate these files for each test, as the compiler may
-    # overwrite some of them when testing with obscure command lines.
-    # This happens at least with the AIX C compiler.
-    : > sub/conftest.c
-    for i in 1 2 3 4 5 6; do
-      echo '#include "conftst'$i'.h"' >> sub/conftest.c
-      # Using `: > sub/conftst$i.h' creates only sub/conftst1.h with
-      # Solaris 8's {/usr,}/bin/sh.
-      touch sub/conftst$i.h
-    done
-    echo "${am__include} ${am__quote}sub/conftest.Po${am__quote}" > confmf
-
-    # We check with `-c' and `-o' for the sake of the "dashmstdout"
-    # mode.  It turns out that the SunPro C++ compiler does not properly
-    # handle `-M -o', and we need to detect this.  Also, some Intel
-# versions had trouble with output in subdirs.
-    am__obj=sub/conftest.${OBJEXT-o}
-    am__minus_obj="-o $am__obj"
-    case $depmode in
-    gcc)
-      # This depmode causes a compiler race in universal mode.
-      test "$am__universal" = false || continue
-      ;;
-    nosideeffect)
-      # after this tag, mechanisms are not by side-effect, so they'll
-      # only be used when explicitly requested
-      if test "x$enable_dependency_tracking" = xyes; then
-	continue
-      else
-	break
-      fi
-      ;;
-    msvisualcpp | msvcmsys)
-      # This compiler won't grok `-c -o', but also, the minuso test has
-      # not run yet.  These depmodes are late enough in the game, and
-      # so weak that their functioning should not be impacted.
-      am__obj=conftest.${OBJEXT-o}
-      am__minus_obj=
-      ;;
-    none) break ;;
-    esac
-    if depmode=$depmode \
-       source=sub/conftest.c object=$am__obj \
-       depfile=sub/conftest.Po tmpdepfile=sub/conftest.TPo \
-       $SHELL ./depcomp $depcc -c $am__minus_obj sub/conftest.c \
-         >/dev/null 2>conftest.err &&
-       grep sub/conftst1.h sub/conftest.Po > /dev/null 2>&1 &&
-       grep sub/conftst6.h sub/conftest.Po > /dev/null 2>&1 &&
-       grep $am__obj sub/conftest.Po > /dev/null 2>&1 &&
-       ${MAKE-make} -s -f confmf > /dev/null 2>&1; then
-      # icc doesn't choke on unknown options, it will just issue warnings
-      # or remarks (even with -Werror).  So we grep stderr for any message
-      # that says an option was ignored or not supported.
-      # When given -MP, icc 7.0 and 7.1 complain thusly:
-      #   icc: Command line warning: ignoring option '-M'; no argument required
-      # The diagnosis changed in icc 8.0:
-      #   icc: Command line remark: option '-MP' not supported
-      if (grep 'ignoring option' conftest.err ||
-          grep 'not supported' conftest.err) >/dev/null 2>&1; then :; else
-        am_cv_CC_dependencies_compiler_type=$depmode
-        break
-      fi
-    fi
-  done
-
-  cd ..
-  rm -rf conftest.dir
-else
-  am_cv_CC_dependencies_compiler_type=none
-fi
-
-fi
-{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $am_cv_CC_dependencies_compiler_type" >&5
-$as_echo "$am_cv_CC_dependencies_compiler_type" >&6; }
-CCDEPMODE=depmode=$am_cv_CC_dependencies_compiler_type
-
- if
-  test "x$enable_dependency_tracking" != xno \
-  && test "$am_cv_CC_dependencies_compiler_type" = gcc3; then
-  am__fastdepCC_TRUE=
-  am__fastdepCC_FALSE='#'
-else
-  am__fastdepCC_TRUE='#'
-  am__fastdepCC_FALSE=
-fi
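For the common gcc3 depmode the whole dance reduces to compiling with
-MD/-MF so the dependency file appears as a side effect, then confirming the
included headers are listed in it. Just that mode, sketched (assumes gcc;
file names mirror the probe above):

    mkdir -p sub
    echo '#include "conftst1.h"' > sub/conftest.c
    touch sub/conftst1.h
    if gcc -c -o sub/conftest.o -MT sub/conftest.o -MD -MP \
           -MF sub/conftest.TPo sub/conftest.c 2>/dev/null &&
       grep conftst1.h sub/conftest.TPo >/dev/null
    then
      echo 'depmode gcc3 works'
    fi
    rm -rf sub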
-
-
-if test "x$CC" != xcc; then
-  { $as_echo "$as_me:${as_lineno-$LINENO}: checking whether $CC and cc understand -c and -o together" >&5
-$as_echo_n "checking whether $CC and cc understand -c and -o together... " >&6; }
-else
-  { $as_echo "$as_me:${as_lineno-$LINENO}: checking whether cc understands -c and -o together" >&5
-$as_echo_n "checking whether cc understands -c and -o together... " >&6; }
-fi
-set dummy $CC; ac_cc=`$as_echo "$2" |
-		      sed 's/[^a-zA-Z0-9_]/_/g;s/^[0-9]/_/'`
-if eval \${ac_cv_prog_cc_${ac_cc}_c_o+:} false; then :
-  $as_echo_n "(cached) " >&6
-else
-  cat confdefs.h - <<_ACEOF >conftest.$ac_ext
-/* end confdefs.h.  */
-
-int
-main ()
-{
-
-  ;
-  return 0;
-}
-_ACEOF
-# Make sure it works both with $CC and with simple cc.
-# We do the test twice because some compilers refuse to overwrite an
-# existing .o file with -o, though they will create one.
-ac_try='$CC -c conftest.$ac_ext -o conftest2.$ac_objext >&5'
-rm -f conftest2.*
-if { { case "(($ac_try" in
-  *\"* | *\`* | *\\*) ac_try_echo=\$ac_try;;
-  *) ac_try_echo=$ac_try;;
-esac
-eval ac_try_echo="\"\$as_me:${as_lineno-$LINENO}: $ac_try_echo\""
-$as_echo "$ac_try_echo"; } >&5
-  (eval "$ac_try") 2>&5
-  ac_status=$?
-  $as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5
-  test $ac_status = 0; } &&
-   test -f conftest2.$ac_objext && { { case "(($ac_try" in
-  *\"* | *\`* | *\\*) ac_try_echo=\$ac_try;;
-  *) ac_try_echo=$ac_try;;
-esac
-eval ac_try_echo="\"\$as_me:${as_lineno-$LINENO}: $ac_try_echo\""
-$as_echo "$ac_try_echo"; } >&5
-  (eval "$ac_try") 2>&5
-  ac_status=$?
-  $as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5
-  test $ac_status = 0; };
-then
-  eval ac_cv_prog_cc_${ac_cc}_c_o=yes
-  if test "x$CC" != xcc; then
-    # Test first that cc exists at all.
-    if { ac_try='cc -c conftest.$ac_ext >&5'
-  { { case "(($ac_try" in
-  *\"* | *\`* | *\\*) ac_try_echo=\$ac_try;;
-  *) ac_try_echo=$ac_try;;
-esac
-eval ac_try_echo="\"\$as_me:${as_lineno-$LINENO}: $ac_try_echo\""
-$as_echo "$ac_try_echo"; } >&5
-  (eval "$ac_try") 2>&5
-  ac_status=$?
-  $as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5
-  test $ac_status = 0; }; }; then
-      ac_try='cc -c conftest.$ac_ext -o conftest2.$ac_objext >&5'
-      rm -f conftest2.*
-      if { { case "(($ac_try" in
-  *\"* | *\`* | *\\*) ac_try_echo=\$ac_try;;
-  *) ac_try_echo=$ac_try;;
-esac
-eval ac_try_echo="\"\$as_me:${as_lineno-$LINENO}: $ac_try_echo\""
-$as_echo "$ac_try_echo"; } >&5
-  (eval "$ac_try") 2>&5
-  ac_status=$?
-  $as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5
-  test $ac_status = 0; } &&
-	 test -f conftest2.$ac_objext && { { case "(($ac_try" in
-  *\"* | *\`* | *\\*) ac_try_echo=\$ac_try;;
-  *) ac_try_echo=$ac_try;;
-esac
-eval ac_try_echo="\"\$as_me:${as_lineno-$LINENO}: $ac_try_echo\""
-$as_echo "$ac_try_echo"; } >&5
-  (eval "$ac_try") 2>&5
-  ac_status=$?
-  $as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5
-  test $ac_status = 0; };
-      then
-	# cc works too.
-	:
-      else
-	# cc exists but doesn't like -o.
-	eval ac_cv_prog_cc_${ac_cc}_c_o=no
-      fi
-    fi
-  fi
-else
-  eval ac_cv_prog_cc_${ac_cc}_c_o=no
-fi
-rm -f core conftest*
-
-fi
-if eval test \$ac_cv_prog_cc_${ac_cc}_c_o = yes; then
-  { $as_echo "$as_me:${as_lineno-$LINENO}: result: yes" >&5
-$as_echo "yes" >&6; }
-else
-  { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5
-$as_echo "no" >&6; }
-
-$as_echo "#define NO_MINUS_C_MINUS_O 1" >>confdefs.h
-
-fi
-
-# FIXME: we rely on the cache variable name because
-# there is no other way.
-set dummy $CC
-am_cc=`echo $2 | sed 's/[^a-zA-Z0-9_]/_/g;s/^[0-9]/_/'`
-eval am_t=\$ac_cv_prog_cc_${am_cc}_c_o
-if test "$am_t" != yes; then
-   # Losing compiler, so override with the script.
-   # FIXME: It is wrong to rewrite CC.
-   # But if we don't then we get into trouble of one sort or another.
-   # A longer-term fix would be to have automake use am__CC in this case,
-   # and then we could set am__CC="\$(top_srcdir)/compile \$(CC)"
-   CC="$am_aux_dir/compile $CC"
-fi
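A stand-alone rendition of the `-c -o' check, keeping the deliberate second
compile that catches compilers which refuse to overwrite an existing object
file (a sketch; assumes cc):

    printf 'int main(void){return 0;}\n' > conftest.c
    rm -f conftest2.o
    if cc -c conftest.c -o conftest2.o 2>/dev/null &&
       cc -c conftest.c -o conftest2.o 2>/dev/null &&
       test -f conftest2.o
    then
      echo 'compiler groks -c -o together; no wrapper needed'
    else
      echo 'losing compiler: prepend the ./compile script to CC'
    fi
    rm -f conftest.c conftest2.o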
-
-
-#if test "$ac_cv_prog_cc_${ac_cc}_c_o" = yes; then
-#    case "$AM_CFLAGS" in
-#	"-g") ;;
-#	*) AM_CFLAGS="${AM_CFLAGS:+$AM_CFLAGS }-g";;
-#    esac
-#fi
-ac_ext=c
-ac_cpp='$CPP $CPPFLAGS'
-ac_compile='$CC -c $CFLAGS $CPPFLAGS conftest.$ac_ext >&5'
-ac_link='$CC -o conftest$ac_exeext $CFLAGS $CPPFLAGS $LDFLAGS conftest.$ac_ext $LIBS >&5'
-ac_compiler_gnu=$ac_cv_c_compiler_gnu
-{ $as_echo "$as_me:${as_lineno-$LINENO}: checking how to run the C preprocessor" >&5
-$as_echo_n "checking how to run the C preprocessor... " >&6; }
-# On Suns, sometimes $CPP names a directory.
-if test -n "$CPP" && test -d "$CPP"; then
-  CPP=
-fi
-if test -z "$CPP"; then
-  if ${ac_cv_prog_CPP+:} false; then :
-  $as_echo_n "(cached) " >&6
-else
-      # Double quotes because CPP needs to be expanded
-    for CPP in "$CC -E" "$CC -E -traditional-cpp" "/lib/cpp"
-    do
-      ac_preproc_ok=false
-for ac_c_preproc_warn_flag in '' yes
-do
-  # Use a header file that comes with gcc, so configuring glibc
-  # with a fresh cross-compiler works.
-  # Prefer <limits.h> to <assert.h> if __STDC__ is defined, since
-  # <limits.h> exists even on freestanding compilers.
-  # On the NeXT, cc -E runs the code through the compiler's parser,
-  # not just through cpp. "Syntax error" is here to catch this case.
-  cat confdefs.h - <<_ACEOF >conftest.$ac_ext
-/* end confdefs.h.  */
-#ifdef __STDC__
-# include <limits.h>
-#else
-# include <assert.h>
-#endif
-		     Syntax error
-_ACEOF
-if ac_fn_c_try_cpp "$LINENO"; then :
-
-else
-  # Broken: fails on valid input.
-continue
-fi
-rm -f conftest.err conftest.i conftest.$ac_ext
-
-  # OK, works on sane cases.  Now check whether nonexistent headers
-  # can be detected and how.
-  cat confdefs.h - <<_ACEOF >conftest.$ac_ext
-/* end confdefs.h.  */
-#include <ac_nonexistent.h>
-_ACEOF
-if ac_fn_c_try_cpp "$LINENO"; then :
-  # Broken: success on invalid input.
-continue
-else
-  # Passes both tests.
-ac_preproc_ok=:
-break
-fi
-rm -f conftest.err conftest.i conftest.$ac_ext
-
-done
-# Because of `break', _AC_PREPROC_IFELSE's cleaning code was skipped.
-rm -f conftest.i conftest.err conftest.$ac_ext
-if $ac_preproc_ok; then :
-  break
-fi
-
-    done
-    ac_cv_prog_CPP=$CPP
-
-fi
-  CPP=$ac_cv_prog_CPP
-else
-  ac_cv_prog_CPP=$CPP
-fi
-{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $CPP" >&5
-$as_echo "$CPP" >&6; }
-ac_preproc_ok=false
-for ac_c_preproc_warn_flag in '' yes
-do
-  # Use a header file that comes with gcc, so configuring glibc
-  # with a fresh cross-compiler works.
-  # Prefer <limits.h> to <assert.h> if __STDC__ is defined, since
-  # <limits.h> exists even on freestanding compilers.
-  # On the NeXT, cc -E runs the code through the compiler's parser,
-  # not just through cpp. "Syntax error" is here to catch this case.
-  cat confdefs.h - <<_ACEOF >conftest.$ac_ext
-/* end confdefs.h.  */
-#ifdef __STDC__
-# include <limits.h>
-#else
-# include <assert.h>
-#endif
-		     Syntax error
-_ACEOF
-if ac_fn_c_try_cpp "$LINENO"; then :
-
-else
-  # Broken: fails on valid input.
-continue
-fi
-rm -f conftest.err conftest.i conftest.$ac_ext
-
-  # OK, works on sane cases.  Now check whether nonexistent headers
-  # can be detected and how.
-  cat confdefs.h - <<_ACEOF >conftest.$ac_ext
-/* end confdefs.h.  */
-#include <ac_nonexistent.h>
-_ACEOF
-if ac_fn_c_try_cpp "$LINENO"; then :
-  # Broken: success on invalid input.
-continue
-else
-  # Passes both tests.
-ac_preproc_ok=:
-break
-fi
-rm -f conftest.err conftest.i conftest.$ac_ext
-
-done
-# Because of `break', _AC_PREPROC_IFELSE's cleaning code was skipped.
-rm -f conftest.i conftest.err conftest.$ac_ext
-if $ac_preproc_ok; then :
-
-else
-  { { $as_echo "$as_me:${as_lineno-$LINENO}: error: in \`$ac_pwd':" >&5
-$as_echo "$as_me: error: in \`$ac_pwd':" >&2;}
-as_fn_error $? "C preprocessor \"$CPP\" fails sanity check
-See \`config.log' for more details" "$LINENO" 5; }
-fi
-
-ac_ext=c
-ac_cpp='$CPP $CPPFLAGS'
-ac_compile='$CC -c $CFLAGS $CPPFLAGS conftest.$ac_ext >&5'
-ac_link='$CC -o conftest$ac_exeext $CFLAGS $CPPFLAGS $LDFLAGS conftest.$ac_ext $LIBS >&5'
-ac_compiler_gnu=$ac_cv_c_compiler_gnu
-
-
-{ $as_echo "$as_me:${as_lineno-$LINENO}: checking whether ln -s works" >&5
-$as_echo_n "checking whether ln -s works... " >&6; }
-LN_S=$as_ln_s
-if test "$LN_S" = "ln -s"; then
-  { $as_echo "$as_me:${as_lineno-$LINENO}: result: yes" >&5
-$as_echo "yes" >&6; }
-else
-  { $as_echo "$as_me:${as_lineno-$LINENO}: result: no, using $LN_S" >&5
-$as_echo "no, using $LN_S" >&6; }
-fi
-
-{ $as_echo "$as_me:${as_lineno-$LINENO}: checking whether ${MAKE-make} sets \$(MAKE)" >&5
-$as_echo_n "checking whether ${MAKE-make} sets \$(MAKE)... " >&6; }
-set x ${MAKE-make}
-ac_make=`$as_echo "$2" | sed 's/+/p/g; s/[^a-zA-Z0-9_]/_/g'`
-if eval \${ac_cv_prog_make_${ac_make}_set+:} false; then :
-  $as_echo_n "(cached) " >&6
-else
-  cat >conftest.make <<\_ACEOF
-SHELL = /bin/sh
-all:
-	@echo '@@@%%%=$(MAKE)=@@@%%%'
-_ACEOF
-# GNU make sometimes prints "make[1]: Entering ...", which would confuse us.
-case `${MAKE-make} -f conftest.make 2>/dev/null` in
-  *@@@%%%=?*=@@@%%%*)
-    eval ac_cv_prog_make_${ac_make}_set=yes;;
-  *)
-    eval ac_cv_prog_make_${ac_make}_set=no;;
-esac
-rm -f conftest.make
-fi
-if eval test \$ac_cv_prog_make_${ac_make}_set = yes; then
-  { $as_echo "$as_me:${as_lineno-$LINENO}: result: yes" >&5
-$as_echo "yes" >&6; }
-  SET_MAKE=
-else
-  { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5
-$as_echo "no" >&6; }
-  SET_MAKE="MAKE=${MAKE-make}"
-fi
-
-for ac_prog in 'bison -y' byacc
-do
-  # Extract the first word of "$ac_prog", so it can be a program name with args.
-set dummy $ac_prog; ac_word=$2
-{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5
-$as_echo_n "checking for $ac_word... " >&6; }
-if ${ac_cv_prog_YACC+:} false; then :
-  $as_echo_n "(cached) " >&6
-else
-  if test -n "$YACC"; then
-  ac_cv_prog_YACC="$YACC" # Let the user override the test.
-else
-as_save_IFS=$IFS; IFS=$PATH_SEPARATOR
-for as_dir in $PATH
-do
-  IFS=$as_save_IFS
-  test -z "$as_dir" && as_dir=.
-    for ac_exec_ext in '' $ac_executable_extensions; do
-  if { test -f "$as_dir/$ac_word$ac_exec_ext" && $as_test_x "$as_dir/$ac_word$ac_exec_ext"; }; then
-    ac_cv_prog_YACC="$ac_prog"
-    $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5
-    break 2
-  fi
-done
-  done
-IFS=$as_save_IFS
-
-fi
-fi
-YACC=$ac_cv_prog_YACC
-if test -n "$YACC"; then
-  { $as_echo "$as_me:${as_lineno-$LINENO}: result: $YACC" >&5
-$as_echo "$YACC" >&6; }
-else
-  { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5
-$as_echo "no" >&6; }
-fi
-
-
-  test -n "$YACC" && break
-done
-test -n "$YACC" || YACC="yacc"
-
-
-for ac_prog in flex lex
-do
-  # Extract the first word of "$ac_prog", so it can be a program name with args.
-set dummy $ac_prog; ac_word=$2
-{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5
-$as_echo_n "checking for $ac_word... " >&6; }
-if ${ac_cv_prog_LEX+:} false; then :
-  $as_echo_n "(cached) " >&6
-else
-  if test -n "$LEX"; then
-  ac_cv_prog_LEX="$LEX" # Let the user override the test.
-else
-as_save_IFS=$IFS; IFS=$PATH_SEPARATOR
-for as_dir in $PATH
-do
-  IFS=$as_save_IFS
-  test -z "$as_dir" && as_dir=.
-    for ac_exec_ext in '' $ac_executable_extensions; do
-  if { test -f "$as_dir/$ac_word$ac_exec_ext" && $as_test_x "$as_dir/$ac_word$ac_exec_ext"; }; then
-    ac_cv_prog_LEX="$ac_prog"
-    $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5
-    break 2
-  fi
-done
-  done
-IFS=$as_save_IFS
-
-fi
-fi
-LEX=$ac_cv_prog_LEX
-if test -n "$LEX"; then
-  { $as_echo "$as_me:${as_lineno-$LINENO}: result: $LEX" >&5
-$as_echo "$LEX" >&6; }
-else
-  { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5
-$as_echo "no" >&6; }
-fi
-
-
-  test -n "$LEX" && break
-done
-test -n "$LEX" || LEX=":"
-
-if test "x$LEX" != "x:"; then
-  cat >conftest.l <<_ACEOF
-%%
-a { ECHO; }
-b { REJECT; }
-c { yymore (); }
-d { yyless (1); }
-e { yyless (input () != 0); }
-f { unput (yytext[0]); }
-. { BEGIN INITIAL; }
-%%
-#ifdef YYTEXT_POINTER
-extern char *yytext;
-#endif
-int
-main (void)
-{
-  return ! yylex () + ! yywrap ();
-}
-_ACEOF
-{ { ac_try="$LEX conftest.l"
-case "(($ac_try" in
-  *\"* | *\`* | *\\*) ac_try_echo=\$ac_try;;
-  *) ac_try_echo=$ac_try;;
-esac
-eval ac_try_echo="\"\$as_me:${as_lineno-$LINENO}: $ac_try_echo\""
-$as_echo "$ac_try_echo"; } >&5
-  (eval "$LEX conftest.l") 2>&5
-  ac_status=$?
-  $as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5
-  test $ac_status = 0; }
-{ $as_echo "$as_me:${as_lineno-$LINENO}: checking lex output file root" >&5
-$as_echo_n "checking lex output file root... " >&6; }
-if ${ac_cv_prog_lex_root+:} false; then :
-  $as_echo_n "(cached) " >&6
-else
-
-# UVCDAT patch (DL): this test fails on some Apple Travis CI builds, and its
-# result isn't used by this package, so just disable the file test.
-# This:
-  $as_echo_n "(skipped) " >&6
-# Replaces this:
-#if test -f lex.yy.c; then
-#  ac_cv_prog_lex_root=lex.yy
-#elif test -f lexyy.c; then
-#  ac_cv_prog_lex_root=lexyy
-#else
-#  as_fn_error $? "cannot find output from $LEX; giving up" "$LINENO" 5
-#fi
-fi
-{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_cv_prog_lex_root" >&5
-$as_echo "$ac_cv_prog_lex_root" >&6; }
-LEX_OUTPUT_ROOT=$ac_cv_prog_lex_root
-
-if test -z "${LEXLIB+set}"; then
-  { $as_echo "$as_me:${as_lineno-$LINENO}: checking lex library" >&5
-$as_echo_n "checking lex library... " >&6; }
-if ${ac_cv_lib_lex+:} false; then :
-  $as_echo_n "(cached) " >&6
-else
-
-    ac_save_LIBS=$LIBS
-    ac_cv_lib_lex='none needed'
-    for ac_lib in '' -lfl -ll; do
-      LIBS="$ac_lib $ac_save_LIBS"
-      cat confdefs.h - <<_ACEOF >conftest.$ac_ext
-/* end confdefs.h.  */
-`cat $LEX_OUTPUT_ROOT.c`
-_ACEOF
-if ac_fn_c_try_link "$LINENO"; then :
-  ac_cv_lib_lex=$ac_lib
-fi
-rm -f core conftest.err conftest.$ac_objext \
-    conftest$ac_exeext conftest.$ac_ext
-      test "$ac_cv_lib_lex" != 'none needed' && break
-    done
-    LIBS=$ac_save_LIBS
-
-fi
-{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_cv_lib_lex" >&5
-$as_echo "$ac_cv_lib_lex" >&6; }
-  test "$ac_cv_lib_lex" != 'none needed' && LEXLIB=$ac_cv_lib_lex
-fi
-
-
-{ $as_echo "$as_me:${as_lineno-$LINENO}: checking whether yytext is a pointer" >&5
-$as_echo_n "checking whether yytext is a pointer... " >&6; }
-if ${ac_cv_prog_lex_yytext_pointer+:} false; then :
-  $as_echo_n "(cached) " >&6
-else
-  # POSIX says lex can declare yytext either as a pointer or an array; the
-# default is implementation-dependent.  Figure out which it is, since
-# not all implementations provide the %pointer and %array declarations.
-ac_cv_prog_lex_yytext_pointer=no
-ac_save_LIBS=$LIBS
-LIBS="$LEXLIB $ac_save_LIBS"
-cat confdefs.h - <<_ACEOF >conftest.$ac_ext
-/* end confdefs.h.  */
-
-  #define YYTEXT_POINTER 1
-`cat $LEX_OUTPUT_ROOT.c`
-_ACEOF
-if ac_fn_c_try_link "$LINENO"; then :
-  ac_cv_prog_lex_yytext_pointer=yes
-fi
-rm -f core conftest.err conftest.$ac_objext \
-    conftest$ac_exeext conftest.$ac_ext
-LIBS=$ac_save_LIBS
-
-fi
-{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_cv_prog_lex_yytext_pointer" >&5
-$as_echo "$ac_cv_prog_lex_yytext_pointer" >&6; }
-if test $ac_cv_prog_lex_yytext_pointer = yes; then
-
-$as_echo "#define YYTEXT_POINTER 1" >>confdefs.h
-
-fi
-rm -f conftest.l $LEX_OUTPUT_ROOT.c
-
-fi
-if test "$LEX" = :; then
-  LEX=${am_missing_run}flex
-fi
-ac_ext=${ac_fc_srcext-f}
-ac_compile='$FC -c $FCFLAGS $ac_fcflags_srcext conftest.$ac_ext >&5'
-ac_link='$FC -o conftest$ac_exeext $FCFLAGS $LDFLAGS $ac_fcflags_srcext conftest.$ac_ext $LIBS >&5'
-ac_compiler_gnu=$ac_cv_fc_compiler_gnu
-if test -n "$ac_tool_prefix"; then
-  for ac_prog in gfortran g95 xlf95 f95 fort ifort ifc efc pgfortran pgf95 lf95 ftn xlf90 f90 pgf90 pghpf epcf90 g77 xlf f77 frt pgf77 cf77 fort77 fl32 af77
-  do
-    # Extract the first word of "$ac_tool_prefix$ac_prog", so it can be a program name with args.
-set dummy $ac_tool_prefix$ac_prog; ac_word=$2
-{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5
-$as_echo_n "checking for $ac_word... " >&6; }
-if ${ac_cv_prog_FC+:} false; then :
-  $as_echo_n "(cached) " >&6
-else
-  if test -n "$FC"; then
-  ac_cv_prog_FC="$FC" # Let the user override the test.
-else
-as_save_IFS=$IFS; IFS=$PATH_SEPARATOR
-for as_dir in $PATH
-do
-  IFS=$as_save_IFS
-  test -z "$as_dir" && as_dir=.
-    for ac_exec_ext in '' $ac_executable_extensions; do
-  if { test -f "$as_dir/$ac_word$ac_exec_ext" && $as_test_x "$as_dir/$ac_word$ac_exec_ext"; }; then
-    ac_cv_prog_FC="$ac_tool_prefix$ac_prog"
-    $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5
-    break 2
-  fi
-done
-  done
-IFS=$as_save_IFS
-
-fi
-fi
-FC=$ac_cv_prog_FC
-if test -n "$FC"; then
-  { $as_echo "$as_me:${as_lineno-$LINENO}: result: $FC" >&5
-$as_echo "$FC" >&6; }
-else
-  { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5
-$as_echo "no" >&6; }
-fi
-
-
-    test -n "$FC" && break
-  done
-fi
-if test -z "$FC"; then
-  ac_ct_FC=$FC
-  for ac_prog in gfortran g95 xlf95 f95 fort ifort ifc efc pgfortran pgf95 lf95 ftn xlf90 f90 pgf90 pghpf epcf90 g77 xlf f77 frt pgf77 cf77 fort77 fl32 af77
-do
-  # Extract the first word of "$ac_prog", so it can be a program name with args.
-set dummy $ac_prog; ac_word=$2
-{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5
-$as_echo_n "checking for $ac_word... " >&6; }
-if ${ac_cv_prog_ac_ct_FC+:} false; then :
-  $as_echo_n "(cached) " >&6
-else
-  if test -n "$ac_ct_FC"; then
-  ac_cv_prog_ac_ct_FC="$ac_ct_FC" # Let the user override the test.
-else
-as_save_IFS=$IFS; IFS=$PATH_SEPARATOR
-for as_dir in $PATH
-do
-  IFS=$as_save_IFS
-  test -z "$as_dir" && as_dir=.
-    for ac_exec_ext in '' $ac_executable_extensions; do
-  if { test -f "$as_dir/$ac_word$ac_exec_ext" && $as_test_x "$as_dir/$ac_word$ac_exec_ext"; }; then
-    ac_cv_prog_ac_ct_FC="$ac_prog"
-    $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5
-    break 2
-  fi
-done
-  done
-IFS=$as_save_IFS
-
-fi
-fi
-ac_ct_FC=$ac_cv_prog_ac_ct_FC
-if test -n "$ac_ct_FC"; then
-  { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_ct_FC" >&5
-$as_echo "$ac_ct_FC" >&6; }
-else
-  { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5
-$as_echo "no" >&6; }
-fi
-
-
-  test -n "$ac_ct_FC" && break
-done
-
-  if test "x$ac_ct_FC" = x; then
-    FC=""
-  else
-    case $cross_compiling:$ac_tool_warned in
-yes:)
-{ $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: using cross tools not prefixed with host triplet" >&5
-$as_echo "$as_me: WARNING: using cross tools not prefixed with host triplet" >&2;}
-ac_tool_warned=yes ;;
-esac
-    FC=$ac_ct_FC
-  fi
-fi
-
-
-# Provide some information about the compiler.
-$as_echo "$as_me:${as_lineno-$LINENO}: checking for Fortran compiler version" >&5
-set X $ac_compile
-ac_compiler=$2
-for ac_option in --version -v -V -qversion; do
-  { { ac_try="$ac_compiler $ac_option >&5"
-case "(($ac_try" in
-  *\"* | *\`* | *\\*) ac_try_echo=\$ac_try;;
-  *) ac_try_echo=$ac_try;;
-esac
-eval ac_try_echo="\"\$as_me:${as_lineno-$LINENO}: $ac_try_echo\""
-$as_echo "$ac_try_echo"; } >&5
-  (eval "$ac_compiler $ac_option >&5") 2>conftest.err
-  ac_status=$?
-  if test -s conftest.err; then
-    sed '10a\
-... rest of stderr output deleted ...
-         10q' conftest.err >conftest.er1
-    cat conftest.er1 >&5
-  fi
-  rm -f conftest.er1 conftest.err
-  $as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5
-  test $ac_status = 0; }
-done
-rm -f a.out
-
-# If we don't use `.F' as extension, the preprocessor is not run on the
-# input file.  (Note that this only needs to work for GNU compilers.)
-ac_save_ext=$ac_ext
-ac_ext=F
-{ $as_echo "$as_me:${as_lineno-$LINENO}: checking whether we are using the GNU Fortran compiler" >&5
-$as_echo_n "checking whether we are using the GNU Fortran compiler... " >&6; }
-if ${ac_cv_fc_compiler_gnu+:} false; then :
-  $as_echo_n "(cached) " >&6
-else
-  cat > conftest.$ac_ext <<_ACEOF
-      program main
-#ifndef __GNUC__
-       choke me
-#endif
-
-      end
-_ACEOF
-if ac_fn_fc_try_compile "$LINENO"; then :
-  ac_compiler_gnu=yes
-else
-  ac_compiler_gnu=no
-fi
-rm -f core conftest.err conftest.$ac_objext conftest.$ac_ext
-ac_cv_fc_compiler_gnu=$ac_compiler_gnu
-
-fi
-{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_cv_fc_compiler_gnu" >&5
-$as_echo "$ac_cv_fc_compiler_gnu" >&6; }
-ac_ext=$ac_save_ext
-ac_test_FCFLAGS=${FCFLAGS+set}
-ac_save_FCFLAGS=$FCFLAGS
-FCFLAGS=
-{ $as_echo "$as_me:${as_lineno-$LINENO}: checking whether $FC accepts -g" >&5
-$as_echo_n "checking whether $FC accepts -g... " >&6; }
-if ${ac_cv_prog_fc_g+:} false; then :
-  $as_echo_n "(cached) " >&6
-else
-  FCFLAGS=-g
-cat > conftest.$ac_ext <<_ACEOF
-      program main
-
-      end
-_ACEOF
-if ac_fn_fc_try_compile "$LINENO"; then :
-  ac_cv_prog_fc_g=yes
-else
-  ac_cv_prog_fc_g=no
-fi
-rm -f core conftest.err conftest.$ac_objext conftest.$ac_ext
-
-fi
-{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_cv_prog_fc_g" >&5
-$as_echo "$ac_cv_prog_fc_g" >&6; }
-if test "$ac_test_FCFLAGS" = set; then
-  FCFLAGS=$ac_save_FCFLAGS
-elif test $ac_cv_prog_fc_g = yes; then
-  if test "x$ac_cv_fc_compiler_gnu" = xyes; then
-    FCFLAGS="-g -O2"
-  else
-    FCFLAGS="-g"
-  fi
-else
-  if test "x$ac_cv_fc_compiler_gnu" = xyes; then
-    FCFLAGS="-O2"
-  else
-    FCFLAGS=
-  fi
-fi
-
-ac_ext=c
-ac_cpp='$CPP $CPPFLAGS'
-ac_compile='$CC -c $CFLAGS $CPPFLAGS conftest.$ac_ext >&5'
-ac_link='$CC -o conftest$ac_exeext $CFLAGS $CPPFLAGS $LDFLAGS conftest.$ac_ext $LIBS >&5'
-ac_compiler_gnu=$ac_cv_c_compiler_gnu
-
-
-# Checks for libraries.
-{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for library containing dirname" >&5
-$as_echo_n "checking for library containing dirname... " >&6; }
-if ${ac_cv_search_dirname+:} false; then :
-  $as_echo_n "(cached) " >&6
-else
-  ac_func_search_save_LIBS=$LIBS
-cat confdefs.h - <<_ACEOF >conftest.$ac_ext
-/* end confdefs.h.  */
-
-/* Override any GCC internal prototype to avoid an error.
-   Use char because int might match the return type of a GCC
-   builtin and then its argument prototype would still apply.  */
-#ifdef __cplusplus
-extern "C"
-#endif
-char dirname ();
-int
-main ()
-{
-return dirname ();
-  ;
-  return 0;
-}
-_ACEOF
-for ac_lib in '' gen; do
-  if test -z "$ac_lib"; then
-    ac_res="none required"
-  else
-    ac_res=-l$ac_lib
-    LIBS="-l$ac_lib  $ac_func_search_save_LIBS"
-  fi
-  if ac_fn_c_try_link "$LINENO"; then :
-  ac_cv_search_dirname=$ac_res
-fi
-rm -f core conftest.err conftest.$ac_objext \
-    conftest$ac_exeext
-  if ${ac_cv_search_dirname+:} false; then :
-  break
-fi
-done
-if ${ac_cv_search_dirname+:} false; then :
-
-else
-  ac_cv_search_dirname=no
-fi
-rm conftest.$ac_ext
-LIBS=$ac_func_search_save_LIBS
-fi
-{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_cv_search_dirname" >&5
-$as_echo "$ac_cv_search_dirname" >&6; }
-ac_res=$ac_cv_search_dirname
-if test "$ac_res" != no; then :
-  test "$ac_res" = "none required" || LIBS="$ac_res $LIBS"
-
-else
-  as_fn_error $? "cannot find function dirname" "$LINENO" 5
-fi
-
-{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for library containing log10" >&5
-$as_echo_n "checking for library containing log10... " >&6; }
-if ${ac_cv_search_log10+:} false; then :
-  $as_echo_n "(cached) " >&6
-else
-  ac_func_search_save_LIBS=$LIBS
-cat confdefs.h - <<_ACEOF >conftest.$ac_ext
-/* end confdefs.h.  */
-
-/* Override any GCC internal prototype to avoid an error.
-   Use char because int might match the return type of a GCC
-   builtin and then its argument prototype would still apply.  */
-#ifdef __cplusplus
-extern "C"
-#endif
-char log10 ();
-int
-main ()
-{
-return log10 ();
-  ;
-  return 0;
-}
-_ACEOF
-for ac_lib in '' m; do
-  if test -z "$ac_lib"; then
-    ac_res="none required"
-  else
-    ac_res=-l$ac_lib
-    LIBS="-l$ac_lib  $ac_func_search_save_LIBS"
-  fi
-  if ac_fn_c_try_link "$LINENO"; then :
-  ac_cv_search_log10=$ac_res
-fi
-rm -f core conftest.err conftest.$ac_objext \
-    conftest$ac_exeext
-  if ${ac_cv_search_log10+:} false; then :
-  break
-fi
-done
-if ${ac_cv_search_log10+:} false; then :
-
-else
-  ac_cv_search_log10=no
-fi
-rm conftest.$ac_ext
-LIBS=$ac_func_search_save_LIBS
-fi
-{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_cv_search_log10" >&5
-$as_echo "$ac_cv_search_log10" >&6; }
-ac_res=$ac_cv_search_log10
-if test "$ac_res" != no; then :
-  test "$ac_res" = "none required" || LIBS="$ac_res $LIBS"
-
-else
-  as_fn_error $? "cannot find function log10" "$LINENO" 5
-fi
-
-{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for library containing XML_StopParser" >&5
-$as_echo_n "checking for library containing XML_StopParser... " >&6; }
-if ${ac_cv_search_XML_StopParser+:} false; then :
-  $as_echo_n "(cached) " >&6
-else
-  ac_func_search_save_LIBS=$LIBS
-cat confdefs.h - <<_ACEOF >conftest.$ac_ext
-/* end confdefs.h.  */
-
-/* Override any GCC internal prototype to avoid an error.
-   Use char because int might match the return type of a GCC
-   builtin and then its argument prototype would still apply.  */
-#ifdef __cplusplus
-extern "C"
-#endif
-char XML_StopParser ();
-int
-main ()
-{
-return XML_StopParser ();
-  ;
-  return 0;
-}
-_ACEOF
-for ac_lib in '' expat; do
-  if test -z "$ac_lib"; then
-    ac_res="none required"
-  else
-    ac_res=-l$ac_lib
-    LIBS="-l$ac_lib  $ac_func_search_save_LIBS"
-  fi
-  if ac_fn_c_try_link "$LINENO"; then :
-  ac_cv_search_XML_StopParser=$ac_res
-fi
-rm -f core conftest.err conftest.$ac_objext \
-    conftest$ac_exeext
-  if ${ac_cv_search_XML_StopParser+:} false; then :
-  break
-fi
-done
-if ${ac_cv_search_XML_StopParser+:} false; then :
-
-else
-  ac_cv_search_XML_StopParser=no
-fi
-rm conftest.$ac_ext
-LIBS=$ac_func_search_save_LIBS
-fi
-{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_cv_search_XML_StopParser" >&5
-$as_echo "$ac_cv_search_XML_StopParser" >&6; }
-ac_res=$ac_cv_search_XML_StopParser
-if test "$ac_res" != no; then :
-  test "$ac_res" = "none required" || LIBS="$ac_res $LIBS"
-
-else
-  as_fn_error $? "cannot find EXPAT function XML_StopParser" "$LINENO" 5
-fi
-
-
-# Checks for header files.
-
-{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for grep that handles long lines and -e" >&5
-$as_echo_n "checking for grep that handles long lines and -e... " >&6; }
-if ${ac_cv_path_GREP+:} false; then :
-  $as_echo_n "(cached) " >&6
-else
-  if test -z "$GREP"; then
-  ac_path_GREP_found=false
-  # Loop through the user's path and test for each of PROGNAME-LIST
-  as_save_IFS=$IFS; IFS=$PATH_SEPARATOR
-for as_dir in $PATH$PATH_SEPARATOR/usr/xpg4/bin
-do
-  IFS=$as_save_IFS
-  test -z "$as_dir" && as_dir=.
-    for ac_prog in grep ggrep; do
-    for ac_exec_ext in '' $ac_executable_extensions; do
-      ac_path_GREP="$as_dir/$ac_prog$ac_exec_ext"
-      { test -f "$ac_path_GREP" && $as_test_x "$ac_path_GREP"; } || continue
-# Check for GNU ac_path_GREP and select it if it is found.
-  # Check for GNU $ac_path_GREP
-case `"$ac_path_GREP" --version 2>&1` in
-*GNU*)
-  ac_cv_path_GREP="$ac_path_GREP" ac_path_GREP_found=:;;
-*)
-  ac_count=0
-  $as_echo_n 0123456789 >"conftest.in"
-  while :
-  do
-    cat "conftest.in" "conftest.in" >"conftest.tmp"
-    mv "conftest.tmp" "conftest.in"
-    cp "conftest.in" "conftest.nl"
-    $as_echo 'GREP' >> "conftest.nl"
-    "$ac_path_GREP" -e 'GREP$' -e '-(cannot match)-' < "conftest.nl" >"conftest.out" 2>/dev/null || break
-    diff "conftest.out" "conftest.nl" >/dev/null 2>&1 || break
-    as_fn_arith $ac_count + 1 && ac_count=$as_val
-    if test $ac_count -gt ${ac_path_GREP_max-0}; then
-      # Best one so far, save it but keep looking for a better one
-      ac_cv_path_GREP="$ac_path_GREP"
-      ac_path_GREP_max=$ac_count
-    fi
-    # 10*(2^10) chars as input seems more than enough
-    test $ac_count -gt 10 && break
-  done
-  rm -f conftest.in conftest.tmp conftest.nl conftest.out;;
-esac
-
-      $ac_path_GREP_found && break 3
-    done
-  done
-  done
-IFS=$as_save_IFS
-  if test -z "$ac_cv_path_GREP"; then
-    as_fn_error $? "no acceptable grep could be found in $PATH$PATH_SEPARATOR/usr/xpg4/bin" "$LINENO" 5
-  fi
-else
-  ac_cv_path_GREP=$GREP
-fi
-
-fi
-{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_cv_path_GREP" >&5
-$as_echo "$ac_cv_path_GREP" >&6; }
- GREP="$ac_cv_path_GREP"
-
-
-{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for egrep" >&5
-$as_echo_n "checking for egrep... " >&6; }
-if ${ac_cv_path_EGREP+:} false; then :
-  $as_echo_n "(cached) " >&6
-else
-  if echo a | $GREP -E '(a|b)' >/dev/null 2>&1
-   then ac_cv_path_EGREP="$GREP -E"
-   else
-     if test -z "$EGREP"; then
-  ac_path_EGREP_found=false
-  # Loop through the user's path and test for each of PROGNAME-LIST
-  as_save_IFS=$IFS; IFS=$PATH_SEPARATOR
-for as_dir in $PATH$PATH_SEPARATOR/usr/xpg4/bin
-do
-  IFS=$as_save_IFS
-  test -z "$as_dir" && as_dir=.
-    for ac_prog in egrep; do
-    for ac_exec_ext in '' $ac_executable_extensions; do
-      ac_path_EGREP="$as_dir/$ac_prog$ac_exec_ext"
-      { test -f "$ac_path_EGREP" && $as_test_x "$ac_path_EGREP"; } || continue
-# Check for GNU ac_path_EGREP and select it if it is found.
-  # Check for GNU $ac_path_EGREP
-case `"$ac_path_EGREP" --version 2>&1` in
-*GNU*)
-  ac_cv_path_EGREP="$ac_path_EGREP" ac_path_EGREP_found=:;;
-*)
-  ac_count=0
-  $as_echo_n 0123456789 >"conftest.in"
-  while :
-  do
-    cat "conftest.in" "conftest.in" >"conftest.tmp"
-    mv "conftest.tmp" "conftest.in"
-    cp "conftest.in" "conftest.nl"
-    $as_echo 'EGREP' >> "conftest.nl"
-    "$ac_path_EGREP" 'EGREP$' < "conftest.nl" >"conftest.out" 2>/dev/null || break
-    diff "conftest.out" "conftest.nl" >/dev/null 2>&1 || break
-    as_fn_arith $ac_count + 1 && ac_count=$as_val
-    if test $ac_count -gt ${ac_path_EGREP_max-0}; then
-      # Best one so far, save it but keep looking for a better one
-      ac_cv_path_EGREP="$ac_path_EGREP"
-      ac_path_EGREP_max=$ac_count
-    fi
-    # 10*(2^10) chars as input seems more than enough
-    test $ac_count -gt 10 && break
-  done
-  rm -f conftest.in conftest.tmp conftest.nl conftest.out;;
-esac
-
-      $ac_path_EGREP_found && break 3
-    done
-  done
-  done
-IFS=$as_save_IFS
-  if test -z "$ac_cv_path_EGREP"; then
-    as_fn_error $? "no acceptable egrep could be found in $PATH$PATH_SEPARATOR/usr/xpg4/bin" "$LINENO" 5
-  fi
-else
-  ac_cv_path_EGREP=$EGREP
-fi
-
-   fi
-fi
-{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_cv_path_EGREP" >&5
-$as_echo "$ac_cv_path_EGREP" >&6; }
- EGREP="$ac_cv_path_EGREP"
-
-
-{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for ANSI C header files" >&5
-$as_echo_n "checking for ANSI C header files... " >&6; }
-if ${ac_cv_header_stdc+:} false; then :
-  $as_echo_n "(cached) " >&6
-else
-  cat confdefs.h - <<_ACEOF >conftest.$ac_ext
-/* end confdefs.h.  */
-#include <stdlib.h>
-#include <stdarg.h>
-#include <string.h>
-#include <float.h>
-
-int
-main ()
-{
-
-  ;
-  return 0;
-}
-_ACEOF
-if ac_fn_c_try_compile "$LINENO"; then :
-  ac_cv_header_stdc=yes
-else
-  ac_cv_header_stdc=no
-fi
-rm -f core conftest.err conftest.$ac_objext conftest.$ac_ext
-
-if test $ac_cv_header_stdc = yes; then
-  # SunOS 4.x string.h does not declare mem*, contrary to ANSI.
-  cat confdefs.h - <<_ACEOF >conftest.$ac_ext
-/* end confdefs.h.  */
-#include <string.h>
-
-_ACEOF
-if (eval "$ac_cpp conftest.$ac_ext") 2>&5 |
-  $EGREP "memchr" >/dev/null 2>&1; then :
-
-else
-  ac_cv_header_stdc=no
-fi
-rm -f conftest*
-
-fi
-
-if test $ac_cv_header_stdc = yes; then
-  # ISC 2.0.2 stdlib.h does not declare free, contrary to ANSI.
-  cat confdefs.h - <<_ACEOF >conftest.$ac_ext
-/* end confdefs.h.  */
-#include <stdlib.h>
-
-_ACEOF
-if (eval "$ac_cpp conftest.$ac_ext") 2>&5 |
-  $EGREP "free" >/dev/null 2>&1; then :
-
-else
-  ac_cv_header_stdc=no
-fi
-rm -f conftest*
-
-fi
-
-if test $ac_cv_header_stdc = yes; then
-  # /bin/cc in Irix-4.0.5 gets non-ANSI ctype macros unless using -ansi.
-  if test "$cross_compiling" = yes; then :
-  :
-else
-  cat confdefs.h - <<_ACEOF >conftest.$ac_ext
-/* end confdefs.h.  */
-#include <ctype.h>
-#include <stdlib.h>
-#if ((' ' & 0x0FF) == 0x020)
-# define ISLOWER(c) ('a' <= (c) && (c) <= 'z')
-# define TOUPPER(c) (ISLOWER(c) ? 'A' + ((c) - 'a') : (c))
-#else
-# define ISLOWER(c) \
-		   (('a' <= (c) && (c) <= 'i') \
-		     || ('j' <= (c) && (c) <= 'r') \
-		     || ('s' <= (c) && (c) <= 'z'))
-# define TOUPPER(c) (ISLOWER(c) ? ((c) | 0x40) : (c))
-#endif
-
-#define XOR(e, f) (((e) && !(f)) || (!(e) && (f)))
-int
-main ()
-{
-  int i;
-  for (i = 0; i < 256; i++)
-    if (XOR (islower (i), ISLOWER (i))
-	|| toupper (i) != TOUPPER (i))
-      return 2;
-  return 0;
-}
-_ACEOF
-if ac_fn_c_try_run "$LINENO"; then :
-
-else
-  ac_cv_header_stdc=no
-fi
-rm -f core *.core core.conftest.* gmon.out bb.out conftest$ac_exeext \
-  conftest.$ac_objext conftest.beam conftest.$ac_ext
-fi
-
-fi
-fi
-{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_cv_header_stdc" >&5
-$as_echo "$ac_cv_header_stdc" >&6; }
-if test $ac_cv_header_stdc = yes; then
-
-$as_echo "#define STDC_HEADERS 1" >>confdefs.h
-
-fi
-
-# On IRIX 5.3, sys/types and inttypes.h are conflicting.
-for ac_header in sys/types.h sys/stat.h stdlib.h string.h memory.h strings.h \
-		  inttypes.h stdint.h unistd.h
-do :
-  as_ac_Header=`$as_echo "ac_cv_header_$ac_header" | $as_tr_sh`
-ac_fn_c_check_header_compile "$LINENO" "$ac_header" "$as_ac_Header" "$ac_includes_default
-"
-if eval test \"x\$"$as_ac_Header"\" = x"yes"; then :
-  cat >>confdefs.h <<_ACEOF
-#define `$as_echo "HAVE_$ac_header" | $as_tr_cpp` 1
-_ACEOF
-
-fi
-
-done
-
-
-for ac_header in float.h inttypes.h stddef.h stdlib.h string.h strings.h
-do :
-  as_ac_Header=`$as_echo "ac_cv_header_$ac_header" | $as_tr_sh`
-ac_fn_c_check_header_mongrel "$LINENO" "$ac_header" "$as_ac_Header" "$ac_includes_default"
-if eval test \"x\$"$as_ac_Header"\" = x"yes"; then :
-  cat >>confdefs.h <<_ACEOF
-#define `$as_echo "HAVE_$ac_header" | $as_tr_cpp` 1
-_ACEOF
-
-fi
-
-done
-
-
-# Checks for the CUNIT unit-testing package
-LD_CUNIT=
-{ $as_echo "$as_me:${as_lineno-$LINENO}: Checking for the CUNIT unit-testing package." >&5
-$as_echo "$as_me: Checking for the CUNIT unit-testing package." >&6;}
-ac_fn_c_check_header_mongrel "$LINENO" "CUnit/CUnit.h" "ac_cv_header_CUnit_CUnit_h" "$ac_includes_default"
-if test "x$ac_cv_header_CUnit_CUnit_h" = xyes; then :
-  { $as_echo "$as_me:${as_lineno-$LINENO}: checking for CU_initialize_registry in -lcunit" >&5
-$as_echo_n "checking for CU_initialize_registry in -lcunit... " >&6; }
-if ${ac_cv_lib_cunit_CU_initialize_registry+:} false; then :
-  $as_echo_n "(cached) " >&6
-else
-  ac_check_lib_save_LIBS=$LIBS
-LIBS="-lcunit  $LIBS"
-cat confdefs.h - <<_ACEOF >conftest.$ac_ext
-/* end confdefs.h.  */
-
-/* Override any GCC internal prototype to avoid an error.
-   Use char because int might match the return type of a GCC
-   builtin and then its argument prototype would still apply.  */
-#ifdef __cplusplus
-extern "C"
-#endif
-char CU_initialize_registry ();
-int
-main ()
-{
-return CU_initialize_registry ();
-  ;
-  return 0;
-}
-_ACEOF
-if ac_fn_c_try_link "$LINENO"; then :
-  ac_cv_lib_cunit_CU_initialize_registry=yes
-else
-  ac_cv_lib_cunit_CU_initialize_registry=no
-fi
-rm -f core conftest.err conftest.$ac_objext \
-    conftest$ac_exeext conftest.$ac_ext
-LIBS=$ac_check_lib_save_LIBS
-fi
-{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_cv_lib_cunit_CU_initialize_registry" >&5
-$as_echo "$ac_cv_lib_cunit_CU_initialize_registry" >&6; }
-if test "x$ac_cv_lib_cunit_CU_initialize_registry" = xyes; then :
-  LD_CUNIT=-lcunit
-fi
-
-fi
-
-
-
-if test "$LD_CUNIT"; then
-    { $as_echo "$as_me:${as_lineno-$LINENO}: CUNIT found.  Enabling unit-tests." >&5
-$as_echo "$as_me: CUNIT found.  Enabling unit-tests." >&6;}
-else
-    { $as_echo "$as_me:${as_lineno-$LINENO}: CUNIT not found.  Disabling unit-tests." >&5
-$as_echo "$as_me: CUNIT not found.  Disabling unit-tests." >&6;}
-fi
- if test "$LD_CUNIT"; then
-  HAVE_CUNIT_TRUE=
-  HAVE_CUNIT_FALSE='#'
-else
-  HAVE_CUNIT_TRUE='#'
-  HAVE_CUNIT_FALSE=
-fi
-
-
-# Checks for typedefs, structures, and compiler characteristics.
-{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for an ANSI C-conforming const" >&5
-$as_echo_n "checking for an ANSI C-conforming const... " >&6; }
-if ${ac_cv_c_const+:} false; then :
-  $as_echo_n "(cached) " >&6
-else
-  cat confdefs.h - <<_ACEOF >conftest.$ac_ext
-/* end confdefs.h.  */
-
-int
-main ()
-{
-/* FIXME: Include the comments suggested by Paul. */
-#ifndef __cplusplus
-  /* Ultrix mips cc rejects this.  */
-  typedef int charset[2];
-  const charset cs;
-  /* SunOS 4.1.1 cc rejects this.  */
-  char const *const *pcpcc;
-  char **ppc;
-  /* NEC SVR4.0.2 mips cc rejects this.  */
-  struct point {int x, y;};
-  static struct point const zero = {0,0};
-  /* AIX XL C 1.02.0.0 rejects this.
-     It does not let you subtract one const X* pointer from another in
-     an arm of an if-expression whose if-part is not a constant
-     expression */
-  const char *g = "string";
-  pcpcc = &g + (g ? g-g : 0);
-  /* HPUX 7.0 cc rejects these. */
-  ++pcpcc;
-  ppc = (char**) pcpcc;
-  pcpcc = (char const *const *) ppc;
-  { /* SCO 3.2v4 cc rejects this.  */
-    char *t;
-    char const *s = 0 ? (char *) 0 : (char const *) 0;
-
-    *t++ = 0;
-    if (s) return 0;
-  }
-  { /* Someone thinks the Sun supposedly-ANSI compiler will reject this.  */
-    int x[] = {25, 17};
-    const int *foo = &x[0];
-    ++foo;
-  }
-  { /* Sun SC1.0 ANSI compiler rejects this -- but not the above. */
-    typedef const int *iptr;
-    iptr p = 0;
-    ++p;
-  }
-  { /* AIX XL C 1.02.0.0 rejects this saying
-       "k.c", line 2.27: 1506-025 (S) Operand must be a modifiable lvalue. */
-    struct s { int j; const int *ap[3]; };
-    struct s *b; b->j = 5;
-  }
-  { /* ULTRIX-32 V3.1 (Rev 9) vcc rejects this */
-    const int foo = 10;
-    if (!foo) return 0;
-  }
-  return !cs[0] && !zero.x;
-#endif
-
-  ;
-  return 0;
-}
-_ACEOF
-if ac_fn_c_try_compile "$LINENO"; then :
-  ac_cv_c_const=yes
-else
-  ac_cv_c_const=no
-fi
-rm -f core conftest.err conftest.$ac_objext conftest.$ac_ext
-fi
-{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_cv_c_const" >&5
-$as_echo "$ac_cv_c_const" >&6; }
-if test $ac_cv_c_const = no; then
-
-$as_echo "#define const /**/" >>confdefs.h
-
-fi
-
-ac_fn_c_check_type "$LINENO" "size_t" "ac_cv_type_size_t" "$ac_includes_default"
-if test "x$ac_cv_type_size_t" = xyes; then :
-
-else
-
-cat >>confdefs.h <<_ACEOF
-#define size_t unsigned int
-_ACEOF
-
-fi
-
-
-# Checks for library functions.
-for ac_func in floor memmove memset modf pow strcasecmp strdup strpbrk
-do :
-  as_ac_var=`$as_echo "ac_cv_func_$ac_func" | $as_tr_sh`
-ac_fn_c_check_func "$LINENO" "$ac_func" "$as_ac_var"
-if eval test \"x\$"$as_ac_var"\" = x"yes"; then :
-  cat >>confdefs.h <<_ACEOF
-#define `$as_echo "HAVE_$ac_func" | $as_tr_cpp` 1
-_ACEOF
-
-fi
-done
-
-
-case `pwd` in
-  *\ * | *\	*)
-    { $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: Libtool does not cope well with whitespace in \`pwd\`" >&5
-$as_echo "$as_me: WARNING: Libtool does not cope well with whitespace in \`pwd\`" >&2;} ;;
-esac
-
-
-
-macro_version='2.4.2'
-macro_revision='1.3337'
-
-
-
-
-
-
-
-
-
-
-
-
-
-ltmain="$ac_aux_dir/ltmain.sh"
-
-# Make sure we can run config.sub.
-$SHELL "$ac_aux_dir/config.sub" sun4 >/dev/null 2>&1 ||
-  as_fn_error $? "cannot run $SHELL $ac_aux_dir/config.sub" "$LINENO" 5
-
-{ $as_echo "$as_me:${as_lineno-$LINENO}: checking build system type" >&5
-$as_echo_n "checking build system type... " >&6; }
-if ${ac_cv_build+:} false; then :
-  $as_echo_n "(cached) " >&6
-else
-  ac_build_alias=$build_alias
-test "x$ac_build_alias" = x &&
-  ac_build_alias=`$SHELL "$ac_aux_dir/config.guess"`
-test "x$ac_build_alias" = x &&
-  as_fn_error $? "cannot guess build type; you must specify one" "$LINENO" 5
-ac_cv_build=`$SHELL "$ac_aux_dir/config.sub" $ac_build_alias` ||
-  as_fn_error $? "$SHELL $ac_aux_dir/config.sub $ac_build_alias failed" "$LINENO" 5
-
-fi
-{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_cv_build" >&5
-$as_echo "$ac_cv_build" >&6; }
-case $ac_cv_build in
-*-*-*) ;;
-*) as_fn_error $? "invalid value of canonical build" "$LINENO" 5;;
-esac
-build=$ac_cv_build
-ac_save_IFS=$IFS; IFS='-'
-set x $ac_cv_build
-shift
-build_cpu=$1
-build_vendor=$2
-shift; shift
-# Remember, the first character of IFS is used to create $*,
-# except with old shells:
-build_os=$*
-IFS=$ac_save_IFS
-case $build_os in *\ *) build_os=`echo "$build_os" | sed 's/ /-/g'`;; esac
-
-
-{ $as_echo "$as_me:${as_lineno-$LINENO}: checking host system type" >&5
-$as_echo_n "checking host system type... " >&6; }
-if ${ac_cv_host+:} false; then :
-  $as_echo_n "(cached) " >&6
-else
-  if test "x$host_alias" = x; then
-  ac_cv_host=$ac_cv_build
-else
-  ac_cv_host=`$SHELL "$ac_aux_dir/config.sub" $host_alias` ||
-    as_fn_error $? "$SHELL $ac_aux_dir/config.sub $host_alias failed" "$LINENO" 5
-fi
-
-fi
-{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_cv_host" >&5
-$as_echo "$ac_cv_host" >&6; }
-case $ac_cv_host in
-*-*-*) ;;
-*) as_fn_error $? "invalid value of canonical host" "$LINENO" 5;;
-esac
-host=$ac_cv_host
-ac_save_IFS=$IFS; IFS='-'
-set x $ac_cv_host
-shift
-host_cpu=$1
-host_vendor=$2
-shift; shift
-# Remember, the first character of IFS is used to create $*,
-# except with old shells:
-host_os=$*
-IFS=$ac_save_IFS
-case $host_os in *\ *) host_os=`echo "$host_os" | sed 's/ /-/g'`;; esac
-
-
-# Backslashify metacharacters that are still active within
-# double-quoted strings.
-sed_quote_subst='s/\(["`$\\]\)/\\\1/g'
-
-# Same as above, but do not quote variable references.
-double_quote_subst='s/\(["`\\]\)/\\\1/g'
-
-# Sed substitution to delay expansion of an escaped shell variable in a
-# double_quote_subst'ed string.
-delay_variable_subst='s/\\\\\\\\\\\$/\\\\\\$/g'
-
-# Sed substitution to delay expansion of an escaped single quote.
-delay_single_quote_subst='s/'\''/'\'\\\\\\\'\''/g'
-
-# Sed substitution to avoid accidental globbing in evaled expressions
-no_glob_subst='s/\*/\\\*/g'
-
-ECHO='\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\'
-ECHO=$ECHO$ECHO$ECHO$ECHO$ECHO
-ECHO=$ECHO$ECHO$ECHO$ECHO$ECHO$ECHO
-
-{ $as_echo "$as_me:${as_lineno-$LINENO}: checking how to print strings" >&5
-$as_echo_n "checking how to print strings... " >&6; }
-# Test print first, because it will be a builtin if present.
-if test "X`( print -r -- -n ) 2>/dev/null`" = X-n && \
-   test "X`print -r -- $ECHO 2>/dev/null`" = "X$ECHO"; then
-  ECHO='print -r --'
-elif test "X`printf %s $ECHO 2>/dev/null`" = "X$ECHO"; then
-  ECHO='printf %s\n'
-else
-  # Use this function as a fallback that always works.
-  func_fallback_echo ()
-  {
-    eval 'cat <<_LTECHO_EOF
-$1
-_LTECHO_EOF'
-  }
-  ECHO='func_fallback_echo'
-fi
-
-# func_echo_all arg...
-# Invoke $ECHO with all args, space-separated.
-func_echo_all ()
-{
-    $ECHO ""
-}
-
-case "$ECHO" in
-  printf*) { $as_echo "$as_me:${as_lineno-$LINENO}: result: printf" >&5
-$as_echo "printf" >&6; } ;;
-  print*) { $as_echo "$as_me:${as_lineno-$LINENO}: result: print -r" >&5
-$as_echo "print -r" >&6; } ;;
-  *) { $as_echo "$as_me:${as_lineno-$LINENO}: result: cat" >&5
-$as_echo "cat" >&6; } ;;
-esac
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for a sed that does not truncate output" >&5
-$as_echo_n "checking for a sed that does not truncate output... " >&6; }
-if ${ac_cv_path_SED+:} false; then :
-  $as_echo_n "(cached) " >&6
-else
-            ac_script=s/aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa/bbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbb/
-     for ac_i in 1 2 3 4 5 6 7; do
-       ac_script="$ac_script$as_nl$ac_script"
-     done
-     echo "$ac_script" 2>/dev/null | sed 99q >conftest.sed
-     { ac_script=; unset ac_script;}
-     if test -z "$SED"; then
-  ac_path_SED_found=false
-  # Loop through the user's path and test for each of PROGNAME-LIST
-  as_save_IFS=$IFS; IFS=$PATH_SEPARATOR
-for as_dir in $PATH
-do
-  IFS=$as_save_IFS
-  test -z "$as_dir" && as_dir=.
-    for ac_prog in sed gsed; do
-    for ac_exec_ext in '' $ac_executable_extensions; do
-      ac_path_SED="$as_dir/$ac_prog$ac_exec_ext"
-      { test -f "$ac_path_SED" && $as_test_x "$ac_path_SED"; } || continue
-# Check for GNU ac_path_SED and select it if it is found.
-  # Check for GNU $ac_path_SED
-case `"$ac_path_SED" --version 2>&1` in
-*GNU*)
-  ac_cv_path_SED="$ac_path_SED" ac_path_SED_found=:;;
-*)
-  ac_count=0
-  $as_echo_n 0123456789 >"conftest.in"
-  while :
-  do
-    cat "conftest.in" "conftest.in" >"conftest.tmp"
-    mv "conftest.tmp" "conftest.in"
-    cp "conftest.in" "conftest.nl"
-    $as_echo '' >> "conftest.nl"
-    "$ac_path_SED" -f conftest.sed < "conftest.nl" >"conftest.out" 2>/dev/null || break
-    diff "conftest.out" "conftest.nl" >/dev/null 2>&1 || break
-    as_fn_arith $ac_count + 1 && ac_count=$as_val
-    if test $ac_count -gt ${ac_path_SED_max-0}; then
-      # Best one so far, save it but keep looking for a better one
-      ac_cv_path_SED="$ac_path_SED"
-      ac_path_SED_max=$ac_count
-    fi
-    # 10*(2^10) chars as input seems more than enough
-    test $ac_count -gt 10 && break
-  done
-  rm -f conftest.in conftest.tmp conftest.nl conftest.out;;
-esac
-
-      $ac_path_SED_found && break 3
-    done
-  done
-  done
-IFS=$as_save_IFS
-  if test -z "$ac_cv_path_SED"; then
-    as_fn_error $? "no acceptable sed could be found in \$PATH" "$LINENO" 5
-  fi
-else
-  ac_cv_path_SED=$SED
-fi
-
-fi
-{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_cv_path_SED" >&5
-$as_echo "$ac_cv_path_SED" >&6; }
- SED="$ac_cv_path_SED"
-  rm -f conftest.sed
-
-test -z "$SED" && SED=sed
-Xsed="$SED -e 1s/^X//"
-
-
-
-
-
-
-
-
-
-
-
-{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for fgrep" >&5
-$as_echo_n "checking for fgrep... " >&6; }
-if ${ac_cv_path_FGREP+:} false; then :
-  $as_echo_n "(cached) " >&6
-else
-  if echo 'ab*c' | $GREP -F 'ab*c' >/dev/null 2>&1
-   then ac_cv_path_FGREP="$GREP -F"
-   else
-     if test -z "$FGREP"; then
-  ac_path_FGREP_found=false
-  # Loop through the user's path and test for each of PROGNAME-LIST
-  as_save_IFS=$IFS; IFS=$PATH_SEPARATOR
-for as_dir in $PATH$PATH_SEPARATOR/usr/xpg4/bin
-do
-  IFS=$as_save_IFS
-  test -z "$as_dir" && as_dir=.
-    for ac_prog in fgrep; do
-    for ac_exec_ext in '' $ac_executable_extensions; do
-      ac_path_FGREP="$as_dir/$ac_prog$ac_exec_ext"
-      { test -f "$ac_path_FGREP" && $as_test_x "$ac_path_FGREP"; } || continue
-# Check for GNU ac_path_FGREP and select it if it is found.
-  # Check for GNU $ac_path_FGREP
-case `"$ac_path_FGREP" --version 2>&1` in
-*GNU*)
-  ac_cv_path_FGREP="$ac_path_FGREP" ac_path_FGREP_found=:;;
-*)
-  ac_count=0
-  $as_echo_n 0123456789 >"conftest.in"
-  while :
-  do
-    cat "conftest.in" "conftest.in" >"conftest.tmp"
-    mv "conftest.tmp" "conftest.in"
-    cp "conftest.in" "conftest.nl"
-    $as_echo 'FGREP' >> "conftest.nl"
-    "$ac_path_FGREP" FGREP < "conftest.nl" >"conftest.out" 2>/dev/null || break
-    diff "conftest.out" "conftest.nl" >/dev/null 2>&1 || break
-    as_fn_arith $ac_count + 1 && ac_count=$as_val
-    if test $ac_count -gt ${ac_path_FGREP_max-0}; then
-      # Best one so far, save it but keep looking for a better one
-      ac_cv_path_FGREP="$ac_path_FGREP"
-      ac_path_FGREP_max=$ac_count
-    fi
-    # 10*(2^10) chars as input seems more than enough
-    test $ac_count -gt 10 && break
-  done
-  rm -f conftest.in conftest.tmp conftest.nl conftest.out;;
-esac
-
-      $ac_path_FGREP_found && break 3
-    done
-  done
-  done
-IFS=$as_save_IFS
-  if test -z "$ac_cv_path_FGREP"; then
-    as_fn_error $? "no acceptable fgrep could be found in $PATH$PATH_SEPARATOR/usr/xpg4/bin" "$LINENO" 5
-  fi
-else
-  ac_cv_path_FGREP=$FGREP
-fi
-
-   fi
-fi
-{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_cv_path_FGREP" >&5
-$as_echo "$ac_cv_path_FGREP" >&6; }
- FGREP="$ac_cv_path_FGREP"
-
-
-test -z "$GREP" && GREP=grep
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-# Check whether --with-gnu-ld was given.
-if test "${with_gnu_ld+set}" = set; then :
-  withval=$with_gnu_ld; test "$withval" = no || with_gnu_ld=yes
-else
-  with_gnu_ld=no
-fi
-
-ac_prog=ld
-if test "$GCC" = yes; then
-  # Check if gcc -print-prog-name=ld gives a path.
-  { $as_echo "$as_me:${as_lineno-$LINENO}: checking for ld used by $CC" >&5
-$as_echo_n "checking for ld used by $CC... " >&6; }
-  case $host in
-  *-*-mingw*)
-    # gcc leaves a trailing carriage return which upsets mingw
-    ac_prog=`($CC -print-prog-name=ld) 2>&5 | tr -d '\015'` ;;
-  *)
-    ac_prog=`($CC -print-prog-name=ld) 2>&5` ;;
-  esac
-  case $ac_prog in
-    # Accept absolute paths.
-    [\\/]* | ?:[\\/]*)
-      re_direlt='/[^/][^/]*/\.\./'
-      # Canonicalize the pathname of ld
-      ac_prog=`$ECHO "$ac_prog"| $SED 's%\\\\%/%g'`
-      while $ECHO "$ac_prog" | $GREP "$re_direlt" > /dev/null 2>&1; do
-	ac_prog=`$ECHO $ac_prog| $SED "s%$re_direlt%/%"`
-      done
-      test -z "$LD" && LD="$ac_prog"
-      ;;
-  "")
-    # If it fails, then pretend we aren't using GCC.
-    ac_prog=ld
-    ;;
-  *)
-    # If it is relative, then search for the first ld in PATH.
-    with_gnu_ld=unknown
-    ;;
-  esac
-elif test "$with_gnu_ld" = yes; then
-  { $as_echo "$as_me:${as_lineno-$LINENO}: checking for GNU ld" >&5
-$as_echo_n "checking for GNU ld... " >&6; }
-else
-  { $as_echo "$as_me:${as_lineno-$LINENO}: checking for non-GNU ld" >&5
-$as_echo_n "checking for non-GNU ld... " >&6; }
-fi
-if ${lt_cv_path_LD+:} false; then :
-  $as_echo_n "(cached) " >&6
-else
-  if test -z "$LD"; then
-  lt_save_ifs="$IFS"; IFS=$PATH_SEPARATOR
-  for ac_dir in $PATH; do
-    IFS="$lt_save_ifs"
-    test -z "$ac_dir" && ac_dir=.
-    if test -f "$ac_dir/$ac_prog" || test -f "$ac_dir/$ac_prog$ac_exeext"; then
-      lt_cv_path_LD="$ac_dir/$ac_prog"
-      # Check to see if the program is GNU ld.  I'd rather use --version,
-      # but apparently some variants of GNU ld only accept -v.
-      # Break only if it was the GNU/non-GNU ld that we prefer.
-      case `"$lt_cv_path_LD" -v 2>&1 </dev/null` in
-      *GNU* | *'with BFD'*)
-	test "$with_gnu_ld" != no && break
-	;;
-      *)
-	test "$with_gnu_ld" != yes && break
-	;;
-      esac
-    fi
-  done
-  IFS="$lt_save_ifs"
-else
-  lt_cv_path_LD="$LD" # Let the user override the test with a path.
-fi
-fi
-
-LD="$lt_cv_path_LD"
-if test -n "$LD"; then
-  { $as_echo "$as_me:${as_lineno-$LINENO}: result: $LD" >&5
-$as_echo "$LD" >&6; }
-else
-  { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5
-$as_echo "no" >&6; }
-fi
-test -z "$LD" && as_fn_error $? "no acceptable ld found in \$PATH" "$LINENO" 5
-{ $as_echo "$as_me:${as_lineno-$LINENO}: checking if the linker ($LD) is GNU ld" >&5
-$as_echo_n "checking if the linker ($LD) is GNU ld... " >&6; }
-if ${lt_cv_prog_gnu_ld+:} false; then :
-  $as_echo_n "(cached) " >&6
-else
-  # I'd rather use --version here, but apparently some GNU lds only accept -v.
-case `$LD -v 2>&1 </dev/null` in
-*GNU* | *'with BFD'*)
-  lt_cv_prog_gnu_ld=yes
-  ;;
-*)
-  lt_cv_prog_gnu_ld=no
-  ;;
-esac
-fi
-{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $lt_cv_prog_gnu_ld" >&5
-$as_echo "$lt_cv_prog_gnu_ld" >&6; }
-with_gnu_ld=$lt_cv_prog_gnu_ld
-
-
-
-
-
-
-
-
-
-{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for BSD- or MS-compatible name lister (nm)" >&5
-$as_echo_n "checking for BSD- or MS-compatible name lister (nm)... " >&6; }
-if ${lt_cv_path_NM+:} false; then :
-  $as_echo_n "(cached) " >&6
-else
-  if test -n "$NM"; then
-  # Let the user override the test.
-  lt_cv_path_NM="$NM"
-else
-  lt_nm_to_check="${ac_tool_prefix}nm"
-  if test -n "$ac_tool_prefix" && test "$build" = "$host"; then
-    lt_nm_to_check="$lt_nm_to_check nm"
-  fi
-  for lt_tmp_nm in $lt_nm_to_check; do
-    lt_save_ifs="$IFS"; IFS=$PATH_SEPARATOR
-    for ac_dir in $PATH /usr/ccs/bin/elf /usr/ccs/bin /usr/ucb /bin; do
-      IFS="$lt_save_ifs"
-      test -z "$ac_dir" && ac_dir=.
-      tmp_nm="$ac_dir/$lt_tmp_nm"
-      if test -f "$tmp_nm" || test -f "$tmp_nm$ac_exeext" ; then
-	# Check to see if the nm accepts a BSD-compat flag.
-	# Adding the `sed 1q' prevents false positives on HP-UX, which says:
-	#   nm: unknown option "B" ignored
-	# Tru64's nm complains that /dev/null is an invalid object file
-	case `"$tmp_nm" -B /dev/null 2>&1 | sed '1q'` in
-	*/dev/null* | *'Invalid file or object type'*)
-	  lt_cv_path_NM="$tmp_nm -B"
-	  break
-	  ;;
-	*)
-	  case `"$tmp_nm" -p /dev/null 2>&1 | sed '1q'` in
-	  */dev/null*)
-	    lt_cv_path_NM="$tmp_nm -p"
-	    break
-	    ;;
-	  *)
-	    lt_cv_path_NM=${lt_cv_path_NM="$tmp_nm"} # keep the first match, but
-	    continue # so that we can try to find one that supports BSD flags
-	    ;;
-	  esac
-	  ;;
-	esac
-      fi
-    done
-    IFS="$lt_save_ifs"
-  done
-  : ${lt_cv_path_NM=no}
-fi
-fi
-{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $lt_cv_path_NM" >&5
-$as_echo "$lt_cv_path_NM" >&6; }
-if test "$lt_cv_path_NM" != "no"; then
-  NM="$lt_cv_path_NM"
-else
-  # Didn't find any BSD compatible name lister, look for dumpbin.
-  if test -n "$DUMPBIN"; then :
-    # Let the user override the test.
-  else
-    if test -n "$ac_tool_prefix"; then
-  for ac_prog in dumpbin "link -dump"
-  do
-    # Extract the first word of "$ac_tool_prefix$ac_prog", so it can be a program name with args.
-set dummy $ac_tool_prefix$ac_prog; ac_word=$2
-{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5
-$as_echo_n "checking for $ac_word... " >&6; }
-if ${ac_cv_prog_DUMPBIN+:} false; then :
-  $as_echo_n "(cached) " >&6
-else
-  if test -n "$DUMPBIN"; then
-  ac_cv_prog_DUMPBIN="$DUMPBIN" # Let the user override the test.
-else
-as_save_IFS=$IFS; IFS=$PATH_SEPARATOR
-for as_dir in $PATH
-do
-  IFS=$as_save_IFS
-  test -z "$as_dir" && as_dir=.
-    for ac_exec_ext in '' $ac_executable_extensions; do
-  if { test -f "$as_dir/$ac_word$ac_exec_ext" && $as_test_x "$as_dir/$ac_word$ac_exec_ext"; }; then
-    ac_cv_prog_DUMPBIN="$ac_tool_prefix$ac_prog"
-    $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5
-    break 2
-  fi
-done
-  done
-IFS=$as_save_IFS
-
-fi
-fi
-DUMPBIN=$ac_cv_prog_DUMPBIN
-if test -n "$DUMPBIN"; then
-  { $as_echo "$as_me:${as_lineno-$LINENO}: result: $DUMPBIN" >&5
-$as_echo "$DUMPBIN" >&6; }
-else
-  { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5
-$as_echo "no" >&6; }
-fi
-
-
-    test -n "$DUMPBIN" && break
-  done
-fi
-if test -z "$DUMPBIN"; then
-  ac_ct_DUMPBIN=$DUMPBIN
-  for ac_prog in dumpbin "link -dump"
-do
-  # Extract the first word of "$ac_prog", so it can be a program name with args.
-set dummy $ac_prog; ac_word=$2
-{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5
-$as_echo_n "checking for $ac_word... " >&6; }
-if ${ac_cv_prog_ac_ct_DUMPBIN+:} false; then :
-  $as_echo_n "(cached) " >&6
-else
-  if test -n "$ac_ct_DUMPBIN"; then
-  ac_cv_prog_ac_ct_DUMPBIN="$ac_ct_DUMPBIN" # Let the user override the test.
-else
-as_save_IFS=$IFS; IFS=$PATH_SEPARATOR
-for as_dir in $PATH
-do
-  IFS=$as_save_IFS
-  test -z "$as_dir" && as_dir=.
-    for ac_exec_ext in '' $ac_executable_extensions; do
-  if { test -f "$as_dir/$ac_word$ac_exec_ext" && $as_test_x "$as_dir/$ac_word$ac_exec_ext"; }; then
-    ac_cv_prog_ac_ct_DUMPBIN="$ac_prog"
-    $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5
-    break 2
-  fi
-done
-  done
-IFS=$as_save_IFS
-
-fi
-fi
-ac_ct_DUMPBIN=$ac_cv_prog_ac_ct_DUMPBIN
-if test -n "$ac_ct_DUMPBIN"; then
-  { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_ct_DUMPBIN" >&5
-$as_echo "$ac_ct_DUMPBIN" >&6; }
-else
-  { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5
-$as_echo "no" >&6; }
-fi
-
-
-  test -n "$ac_ct_DUMPBIN" && break
-done
-
-  if test "x$ac_ct_DUMPBIN" = x; then
-    DUMPBIN=":"
-  else
-    case $cross_compiling:$ac_tool_warned in
-yes:)
-{ $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: using cross tools not prefixed with host triplet" >&5
-$as_echo "$as_me: WARNING: using cross tools not prefixed with host triplet" >&2;}
-ac_tool_warned=yes ;;
-esac
-    DUMPBIN=$ac_ct_DUMPBIN
-  fi
-fi
-
-    case `$DUMPBIN -symbols /dev/null 2>&1 | sed '1q'` in
-    *COFF*)
-      DUMPBIN="$DUMPBIN -symbols"
-      ;;
-    *)
-      DUMPBIN=:
-      ;;
-    esac
-  fi
-
-  if test "$DUMPBIN" != ":"; then
-    NM="$DUMPBIN"
-  fi
-fi
-test -z "$NM" && NM=nm
-
-
-
-
-
-
-{ $as_echo "$as_me:${as_lineno-$LINENO}: checking the name lister ($NM) interface" >&5
-$as_echo_n "checking the name lister ($NM) interface... " >&6; }
-if ${lt_cv_nm_interface+:} false; then :
-  $as_echo_n "(cached) " >&6
-else
-  lt_cv_nm_interface="BSD nm"
-  echo "int some_variable = 0;" > conftest.$ac_ext
-  (eval echo "\"\$as_me:$LINENO: $ac_compile\"" >&5)
-  (eval "$ac_compile" 2>conftest.err)
-  cat conftest.err >&5
-  (eval echo "\"\$as_me:$LINENO: $NM \\\"conftest.$ac_objext\\\"\"" >&5)
-  (eval "$NM \"conftest.$ac_objext\"" 2>conftest.err > conftest.out)
-  cat conftest.err >&5
-  (eval echo "\"\$as_me:$LINENO: output\"" >&5)
-  cat conftest.out >&5
-  if $GREP 'External.*some_variable' conftest.out > /dev/null; then
-    lt_cv_nm_interface="MS dumpbin"
-  fi
-  rm -f conftest*
-fi
-{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $lt_cv_nm_interface" >&5
-$as_echo "$lt_cv_nm_interface" >&6; }
-
-# find the maximum length of command line arguments
-{ $as_echo "$as_me:${as_lineno-$LINENO}: checking the maximum length of command line arguments" >&5
-$as_echo_n "checking the maximum length of command line arguments... " >&6; }
-if ${lt_cv_sys_max_cmd_len+:} false; then :
-  $as_echo_n "(cached) " >&6
-else
-    i=0
-  teststring="ABCD"
-
-  case $build_os in
-  msdosdjgpp*)
-    # On DJGPP, this test can blow up pretty badly due to problems in libc
-    # (any single argument exceeding 2000 bytes causes a buffer overrun
-    # during glob expansion).  Even if it were fixed, the result of this
-    # check would be larger than it should be.
-    lt_cv_sys_max_cmd_len=12288;    # 12K is about right
-    ;;
-
-  gnu*)
-    # Under GNU Hurd, this test is not required because there is
-    # no limit to the length of command line arguments.
-    # Libtool will interpret -1 as no limit whatsoever
-    lt_cv_sys_max_cmd_len=-1;
-    ;;
-
-  cygwin* | mingw* | cegcc*)
-    # On Win9x/ME, this test blows up -- it succeeds, but takes
-    # about 5 minutes as the teststring grows exponentially.
-    # Worse, since 9x/ME are not pre-emptively multitasking,
-    # you end up with a "frozen" computer, even though with patience
-    # the test eventually succeeds (with a max line length of 256k).
-    # Instead, let's just punt: use the minimum linelength reported by
-    # all of the supported platforms: 8192 (on NT/2K/XP).
-    lt_cv_sys_max_cmd_len=8192;
-    ;;
-
-  mint*)
-    # On MiNT this can take a long time and run out of memory.
-    lt_cv_sys_max_cmd_len=8192;
-    ;;
-
-  amigaos*)
-    # On AmigaOS with pdksh, this test takes hours, literally.
-    # So we just punt and use a minimum line length of 8192.
-    lt_cv_sys_max_cmd_len=8192;
-    ;;
-
-  netbsd* | freebsd* | openbsd* | darwin* | dragonfly*)
-    # This has been around since 386BSD, at least.  Likely further.
-    if test -x /sbin/sysctl; then
-      lt_cv_sys_max_cmd_len=`/sbin/sysctl -n kern.argmax`
-    elif test -x /usr/sbin/sysctl; then
-      lt_cv_sys_max_cmd_len=`/usr/sbin/sysctl -n kern.argmax`
-    else
-      lt_cv_sys_max_cmd_len=65536	# usable default for all BSDs
-    fi
-    # And add a safety zone
-    lt_cv_sys_max_cmd_len=`expr $lt_cv_sys_max_cmd_len \/ 4`
-    lt_cv_sys_max_cmd_len=`expr $lt_cv_sys_max_cmd_len \* 3`
-    ;;
-
-  interix*)
-    # We know the value 262144 and hardcode it with a safety zone (like BSD)
-    lt_cv_sys_max_cmd_len=196608
-    ;;
-
-  os2*)
-    # The test takes a long time on OS/2.
-    lt_cv_sys_max_cmd_len=8192
-    ;;
-
-  osf*)
-    # Dr. Hans Ekkehard Plesser reports seeing a kernel panic running configure
-    # due to this test when exec_disable_arg_limit is 1 on Tru64. It is not
-    # nice to cause kernel panics so lets avoid the loop below.
-    # First set a reasonable default.
-    lt_cv_sys_max_cmd_len=16384
-    #
-    if test -x /sbin/sysconfig; then
-      case `/sbin/sysconfig -q proc exec_disable_arg_limit` in
-        *1*) lt_cv_sys_max_cmd_len=-1 ;;
-      esac
-    fi
-    ;;
-  sco3.2v5*)
-    lt_cv_sys_max_cmd_len=102400
-    ;;
-  sysv5* | sco5v6* | sysv4.2uw2*)
-    kargmax=`grep ARG_MAX /etc/conf/cf.d/stune 2>/dev/null`
-    if test -n "$kargmax"; then
-      lt_cv_sys_max_cmd_len=`echo $kargmax | sed 's/.*[	 ]//'`
-    else
-      lt_cv_sys_max_cmd_len=32768
-    fi
-    ;;
-  *)
-    lt_cv_sys_max_cmd_len=`(getconf ARG_MAX) 2> /dev/null`
-    if test -n "$lt_cv_sys_max_cmd_len"; then
-      lt_cv_sys_max_cmd_len=`expr $lt_cv_sys_max_cmd_len \/ 4`
-      lt_cv_sys_max_cmd_len=`expr $lt_cv_sys_max_cmd_len \* 3`
-    else
-      # Make teststring a little bigger before we do anything with it.
-      # a 1K string should be a reasonable start.
-      for i in 1 2 3 4 5 6 7 8 ; do
-        teststring=$teststring$teststring
-      done
-      SHELL=${SHELL-${CONFIG_SHELL-/bin/sh}}
-      # If test is not a shell built-in, we'll probably end up computing a
-      # maximum length that is only half of the actual maximum length, but
-      # we can't tell.
-      while { test "X"`env echo "$teststring$teststring" 2>/dev/null` \
-	         = "X$teststring$teststring"; } >/dev/null 2>&1 &&
-	      test $i != 17 # 1/2 MB should be enough
-      do
-        i=`expr $i + 1`
-        teststring=$teststring$teststring
-      done
-      # Only check the string length outside the loop.
-      lt_cv_sys_max_cmd_len=`expr "X$teststring" : ".*" 2>&1`
-      teststring=
-      # Add a significant safety factor because C++ compilers can tack on
-      # massive amounts of additional arguments before passing them to the
-      # linker.  It appears as though 1/2 is a usable value.
-      lt_cv_sys_max_cmd_len=`expr $lt_cv_sys_max_cmd_len \/ 2`
-    fi
-    ;;
-  esac
-
-fi
-
-if test -n "$lt_cv_sys_max_cmd_len" ; then
-  { $as_echo "$as_me:${as_lineno-$LINENO}: result: $lt_cv_sys_max_cmd_len" >&5
-$as_echo "$lt_cv_sys_max_cmd_len" >&6; }
-else
-  { $as_echo "$as_me:${as_lineno-$LINENO}: result: none" >&5
-$as_echo "none" >&6; }
-fi
-max_cmd_len=$lt_cv_sys_max_cmd_len
-
-
-
-
-
-
-: ${CP="cp -f"}
-: ${MV="mv -f"}
-: ${RM="rm -f"}
-
-{ $as_echo "$as_me:${as_lineno-$LINENO}: checking whether the shell understands some XSI constructs" >&5
-$as_echo_n "checking whether the shell understands some XSI constructs... " >&6; }
-# Try some XSI features
-xsi_shell=no
-( _lt_dummy="a/b/c"
-  test "${_lt_dummy##*/},${_lt_dummy%/*},${_lt_dummy#??}"${_lt_dummy%"$_lt_dummy"}, \
-      = c,a/b,b/c, \
-    && eval 'test $(( 1 + 1 )) -eq 2 \
-    && test "${#_lt_dummy}" -eq 5' ) >/dev/null 2>&1 \
-  && xsi_shell=yes
-{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $xsi_shell" >&5
-$as_echo "$xsi_shell" >&6; }
-
-
-{ $as_echo "$as_me:${as_lineno-$LINENO}: checking whether the shell understands \"+=\"" >&5
-$as_echo_n "checking whether the shell understands \"+=\"... " >&6; }
-lt_shell_append=no
-( foo=bar; set foo baz; eval "$1+=\$2" && test "$foo" = barbaz ) \
-    >/dev/null 2>&1 \
-  && lt_shell_append=yes
-{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $lt_shell_append" >&5
-$as_echo "$lt_shell_append" >&6; }
-
-
-if ( (MAIL=60; unset MAIL) || exit) >/dev/null 2>&1; then
-  lt_unset=unset
-else
-  lt_unset=false
-fi
-
-
-
-
-
-# test EBCDIC or ASCII
-case `echo X|tr X '\101'` in
- A) # ASCII based system
-    # \n is not interpreted correctly by Solaris 8 /usr/ucb/tr
-  lt_SP2NL='tr \040 \012'
-  lt_NL2SP='tr \015\012 \040\040'
-  ;;
- *) # EBCDIC based system
-  lt_SP2NL='tr \100 \n'
-  lt_NL2SP='tr \r\n \100\100'
-  ;;
-esac
-
-
-
-
-
-
-
-
-
-{ $as_echo "$as_me:${as_lineno-$LINENO}: checking how to convert $build file names to $host format" >&5
-$as_echo_n "checking how to convert $build file names to $host format... " >&6; }
-if ${lt_cv_to_host_file_cmd+:} false; then :
-  $as_echo_n "(cached) " >&6
-else
-  case $host in
-  *-*-mingw* )
-    case $build in
-      *-*-mingw* ) # actually msys
-        lt_cv_to_host_file_cmd=func_convert_file_msys_to_w32
-        ;;
-      *-*-cygwin* )
-        lt_cv_to_host_file_cmd=func_convert_file_cygwin_to_w32
-        ;;
-      * ) # otherwise, assume *nix
-        lt_cv_to_host_file_cmd=func_convert_file_nix_to_w32
-        ;;
-    esac
-    ;;
-  *-*-cygwin* )
-    case $build in
-      *-*-mingw* ) # actually msys
-        lt_cv_to_host_file_cmd=func_convert_file_msys_to_cygwin
-        ;;
-      *-*-cygwin* )
-        lt_cv_to_host_file_cmd=func_convert_file_noop
-        ;;
-      * ) # otherwise, assume *nix
-        lt_cv_to_host_file_cmd=func_convert_file_nix_to_cygwin
-        ;;
-    esac
-    ;;
-  * ) # unhandled hosts (and "normal" native builds)
-    lt_cv_to_host_file_cmd=func_convert_file_noop
-    ;;
-esac
-
-fi
-
-to_host_file_cmd=$lt_cv_to_host_file_cmd
-{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $lt_cv_to_host_file_cmd" >&5
-$as_echo "$lt_cv_to_host_file_cmd" >&6; }
-
-
-
-
-
-{ $as_echo "$as_me:${as_lineno-$LINENO}: checking how to convert $build file names to toolchain format" >&5
-$as_echo_n "checking how to convert $build file names to toolchain format... " >&6; }
-if ${lt_cv_to_tool_file_cmd+:} false; then :
-  $as_echo_n "(cached) " >&6
-else
-  # Assume ordinary cross tools, or native build.
-lt_cv_to_tool_file_cmd=func_convert_file_noop
-case $host in
-  *-*-mingw* )
-    case $build in
-      *-*-mingw* ) # actually msys
-        lt_cv_to_tool_file_cmd=func_convert_file_msys_to_w32
-        ;;
-    esac
-    ;;
-esac
-
-fi
-
-to_tool_file_cmd=$lt_cv_to_tool_file_cmd
-{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $lt_cv_to_tool_file_cmd" >&5
-$as_echo "$lt_cv_to_tool_file_cmd" >&6; }
-
-
-
-
-
-{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $LD option to reload object files" >&5
-$as_echo_n "checking for $LD option to reload object files... " >&6; }
-if ${lt_cv_ld_reload_flag+:} false; then :
-  $as_echo_n "(cached) " >&6
-else
-  lt_cv_ld_reload_flag='-r'
-fi
-{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $lt_cv_ld_reload_flag" >&5
-$as_echo "$lt_cv_ld_reload_flag" >&6; }
-reload_flag=$lt_cv_ld_reload_flag
-case $reload_flag in
-"" | " "*) ;;
-*) reload_flag=" $reload_flag" ;;
-esac
-reload_cmds='$LD$reload_flag -o $output$reload_objs'
-case $host_os in
-  cygwin* | mingw* | pw32* | cegcc*)
-    if test "$GCC" != yes; then
-      reload_cmds=false
-    fi
-    ;;
-  darwin*)
-    if test "$GCC" = yes; then
-      reload_cmds='$LTCC $LTCFLAGS -nostdlib ${wl}-r -o $output$reload_objs'
-    else
-      reload_cmds='$LD$reload_flag -o $output$reload_objs'
-    fi
-    ;;
-esac
-
-
-
-
-
-
-
-
-
-if test -n "$ac_tool_prefix"; then
-  # Extract the first word of "${ac_tool_prefix}objdump", so it can be a program name with args.
-set dummy ${ac_tool_prefix}objdump; ac_word=$2
-{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5
-$as_echo_n "checking for $ac_word... " >&6; }
-if ${ac_cv_prog_OBJDUMP+:} false; then :
-  $as_echo_n "(cached) " >&6
-else
-  if test -n "$OBJDUMP"; then
-  ac_cv_prog_OBJDUMP="$OBJDUMP" # Let the user override the test.
-else
-as_save_IFS=$IFS; IFS=$PATH_SEPARATOR
-for as_dir in $PATH
-do
-  IFS=$as_save_IFS
-  test -z "$as_dir" && as_dir=.
-    for ac_exec_ext in '' $ac_executable_extensions; do
-  if { test -f "$as_dir/$ac_word$ac_exec_ext" && $as_test_x "$as_dir/$ac_word$ac_exec_ext"; }; then
-    ac_cv_prog_OBJDUMP="${ac_tool_prefix}objdump"
-    $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5
-    break 2
-  fi
-done
-  done
-IFS=$as_save_IFS
-
-fi
-fi
-OBJDUMP=$ac_cv_prog_OBJDUMP
-if test -n "$OBJDUMP"; then
-  { $as_echo "$as_me:${as_lineno-$LINENO}: result: $OBJDUMP" >&5
-$as_echo "$OBJDUMP" >&6; }
-else
-  { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5
-$as_echo "no" >&6; }
-fi
-
-
-fi
-if test -z "$ac_cv_prog_OBJDUMP"; then
-  ac_ct_OBJDUMP=$OBJDUMP
-  # Extract the first word of "objdump", so it can be a program name with args.
-set dummy objdump; ac_word=$2
-{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5
-$as_echo_n "checking for $ac_word... " >&6; }
-if ${ac_cv_prog_ac_ct_OBJDUMP+:} false; then :
-  $as_echo_n "(cached) " >&6
-else
-  if test -n "$ac_ct_OBJDUMP"; then
-  ac_cv_prog_ac_ct_OBJDUMP="$ac_ct_OBJDUMP" # Let the user override the test.
-else
-as_save_IFS=$IFS; IFS=$PATH_SEPARATOR
-for as_dir in $PATH
-do
-  IFS=$as_save_IFS
-  test -z "$as_dir" && as_dir=.
-    for ac_exec_ext in '' $ac_executable_extensions; do
-  if { test -f "$as_dir/$ac_word$ac_exec_ext" && $as_test_x "$as_dir/$ac_word$ac_exec_ext"; }; then
-    ac_cv_prog_ac_ct_OBJDUMP="objdump"
-    $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5
-    break 2
-  fi
-done
-  done
-IFS=$as_save_IFS
-
-fi
-fi
-ac_ct_OBJDUMP=$ac_cv_prog_ac_ct_OBJDUMP
-if test -n "$ac_ct_OBJDUMP"; then
-  { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_ct_OBJDUMP" >&5
-$as_echo "$ac_ct_OBJDUMP" >&6; }
-else
-  { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5
-$as_echo "no" >&6; }
-fi
-
-  if test "x$ac_ct_OBJDUMP" = x; then
-    OBJDUMP="false"
-  else
-    case $cross_compiling:$ac_tool_warned in
-yes:)
-{ $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: using cross tools not prefixed with host triplet" >&5
-$as_echo "$as_me: WARNING: using cross tools not prefixed with host triplet" >&2;}
-ac_tool_warned=yes ;;
-esac
-    OBJDUMP=$ac_ct_OBJDUMP
-  fi
-else
-  OBJDUMP="$ac_cv_prog_OBJDUMP"
-fi
-
-test -z "$OBJDUMP" && OBJDUMP=objdump
-
-
-
-
-
-
-
-
-
-{ $as_echo "$as_me:${as_lineno-$LINENO}: checking how to recognize dependent libraries" >&5
-$as_echo_n "checking how to recognize dependent libraries... " >&6; }
-if ${lt_cv_deplibs_check_method+:} false; then :
-  $as_echo_n "(cached) " >&6
-else
-  lt_cv_file_magic_cmd='$MAGIC_CMD'
-lt_cv_file_magic_test_file=
-lt_cv_deplibs_check_method='unknown'
-# Need to set the preceding variable on all platforms that support
-# interlibrary dependencies.
-# 'none' -- dependencies not supported.
-# `unknown' -- same as none, but documents that we really don't know.
-# 'pass_all' -- all dependencies passed with no checks.
-# 'test_compile' -- check by making test program.
-# 'file_magic [[regex]]' -- check by looking for files in library path
-# which responds to the $file_magic_cmd with a given extended regex.
-# If you have `file' or equivalent on your system and you're not sure
-# whether `pass_all' will *always* work, you probably want this one.
-
-case $host_os in
-aix[4-9]*)
-  lt_cv_deplibs_check_method=pass_all
-  ;;
-
-beos*)
-  lt_cv_deplibs_check_method=pass_all
-  ;;
-
-bsdi[45]*)
-  lt_cv_deplibs_check_method='file_magic ELF [0-9][0-9]*-bit [ML]SB (shared object|dynamic lib)'
-  lt_cv_file_magic_cmd='/usr/bin/file -L'
-  lt_cv_file_magic_test_file=/shlib/libc.so
-  ;;
-
-cygwin*)
-  # func_win32_libid is a shell function defined in ltmain.sh
-  lt_cv_deplibs_check_method='file_magic ^x86 archive import|^x86 DLL'
-  lt_cv_file_magic_cmd='func_win32_libid'
-  ;;
-
-mingw* | pw32*)
-  # Base MSYS/MinGW do not provide the 'file' command needed by
-  # func_win32_libid shell function, so use a weaker test based on 'objdump',
-  # unless we find 'file', for example because we are cross-compiling.
-  # func_win32_libid assumes BSD nm, so disallow it if using MS dumpbin.
-  if ( test "$lt_cv_nm_interface" = "BSD nm" && file / ) >/dev/null 2>&1; then
-    lt_cv_deplibs_check_method='file_magic ^x86 archive import|^x86 DLL'
-    lt_cv_file_magic_cmd='func_win32_libid'
-  else
-    # Keep this pattern in sync with the one in func_win32_libid.
-    lt_cv_deplibs_check_method='file_magic file format (pei*-i386(.*architecture: i386)?|pe-arm-wince|pe-x86-64)'
-    lt_cv_file_magic_cmd='$OBJDUMP -f'
-  fi
-  ;;
-
-cegcc*)
-  # use the weaker test based on 'objdump'. See mingw*.
-  lt_cv_deplibs_check_method='file_magic file format pe-arm-.*little(.*architecture: arm)?'
-  lt_cv_file_magic_cmd='$OBJDUMP -f'
-  ;;
-
-darwin* | rhapsody*)
-  lt_cv_deplibs_check_method=pass_all
-  ;;
-
-freebsd* | dragonfly*)
-  if echo __ELF__ | $CC -E - | $GREP __ELF__ > /dev/null; then
-    case $host_cpu in
-    i*86 )
-      # Not sure whether the presence of OpenBSD here was a mistake.
-      # Let's accept both of them until this is cleared up.
-      lt_cv_deplibs_check_method='file_magic (FreeBSD|OpenBSD|DragonFly)/i[3-9]86 (compact )?demand paged shared library'
-      lt_cv_file_magic_cmd=/usr/bin/file
-      lt_cv_file_magic_test_file=`echo /usr/lib/libc.so.*`
-      ;;
-    esac
-  else
-    lt_cv_deplibs_check_method=pass_all
-  fi
-  ;;
-
-gnu*)
-  lt_cv_deplibs_check_method=pass_all
-  ;;
-
-haiku*)
-  lt_cv_deplibs_check_method=pass_all
-  ;;
-
-hpux10.20* | hpux11*)
-  lt_cv_file_magic_cmd=/usr/bin/file
-  case $host_cpu in
-  ia64*)
-    lt_cv_deplibs_check_method='file_magic (s[0-9][0-9][0-9]|ELF-[0-9][0-9]) shared object file - IA64'
-    lt_cv_file_magic_test_file=/usr/lib/hpux32/libc.so
-    ;;
-  hppa*64*)
-    lt_cv_deplibs_check_method='file_magic (s[0-9][0-9][0-9]|ELF[ -][0-9][0-9])(-bit)?( [LM]SB)? shared object( file)?[, -]* PA-RISC [0-9]\.[0-9]'
-    lt_cv_file_magic_test_file=/usr/lib/pa20_64/libc.sl
-    ;;
-  *)
-    lt_cv_deplibs_check_method='file_magic (s[0-9][0-9][0-9]|PA-RISC[0-9]\.[0-9]) shared library'
-    lt_cv_file_magic_test_file=/usr/lib/libc.sl
-    ;;
-  esac
-  ;;
-
-interix[3-9]*)
-  # PIC code is broken on Interix 3.x, that's why |\.a not |_pic\.a here
-  lt_cv_deplibs_check_method='match_pattern /lib[^/]+(\.so|\.a)$'
-  ;;
-
-irix5* | irix6* | nonstopux*)
-  case $LD in
-  *-32|*"-32 ") libmagic=32-bit;;
-  *-n32|*"-n32 ") libmagic=N32;;
-  *-64|*"-64 ") libmagic=64-bit;;
-  *) libmagic=never-match;;
-  esac
-  lt_cv_deplibs_check_method=pass_all
-  ;;
-
-# This must be glibc/ELF.
-linux* | k*bsd*-gnu | kopensolaris*-gnu)
-  lt_cv_deplibs_check_method=pass_all
-  ;;
-
-netbsd*)
-  if echo __ELF__ | $CC -E - | $GREP __ELF__ > /dev/null; then
-    lt_cv_deplibs_check_method='match_pattern /lib[^/]+(\.so\.[0-9]+\.[0-9]+|_pic\.a)$'
-  else
-    lt_cv_deplibs_check_method='match_pattern /lib[^/]+(\.so|_pic\.a)$'
-  fi
-  ;;
-
-newos6*)
-  lt_cv_deplibs_check_method='file_magic ELF [0-9][0-9]*-bit [ML]SB (executable|dynamic lib)'
-  lt_cv_file_magic_cmd=/usr/bin/file
-  lt_cv_file_magic_test_file=/usr/lib/libnls.so
-  ;;
-
-*nto* | *qnx*)
-  lt_cv_deplibs_check_method=pass_all
-  ;;
-
-openbsd*)
-  if test -z "`echo __ELF__ | $CC -E - | $GREP __ELF__`" || test "$host_os-$host_cpu" = "openbsd2.8-powerpc"; then
-    lt_cv_deplibs_check_method='match_pattern /lib[^/]+(\.so\.[0-9]+\.[0-9]+|\.so|_pic\.a)$'
-  else
-    lt_cv_deplibs_check_method='match_pattern /lib[^/]+(\.so\.[0-9]+\.[0-9]+|_pic\.a)$'
-  fi
-  ;;
-
-osf3* | osf4* | osf5*)
-  lt_cv_deplibs_check_method=pass_all
-  ;;
-
-rdos*)
-  lt_cv_deplibs_check_method=pass_all
-  ;;
-
-solaris*)
-  lt_cv_deplibs_check_method=pass_all
-  ;;
-
-sysv5* | sco3.2v5* | sco5v6* | unixware* | OpenUNIX* | sysv4*uw2*)
-  lt_cv_deplibs_check_method=pass_all
-  ;;
-
-sysv4 | sysv4.3*)
-  case $host_vendor in
-  motorola)
-    lt_cv_deplibs_check_method='file_magic ELF [0-9][0-9]*-bit [ML]SB (shared object|dynamic lib) M[0-9][0-9]* Version [0-9]'
-    lt_cv_file_magic_test_file=`echo /usr/lib/libc.so*`
-    ;;
-  ncr)
-    lt_cv_deplibs_check_method=pass_all
-    ;;
-  sequent)
-    lt_cv_file_magic_cmd='/bin/file'
-    lt_cv_deplibs_check_method='file_magic ELF [0-9][0-9]*-bit [LM]SB (shared object|dynamic lib )'
-    ;;
-  sni)
-    lt_cv_file_magic_cmd='/bin/file'
-    lt_cv_deplibs_check_method="file_magic ELF [0-9][0-9]*-bit [LM]SB dynamic lib"
-    lt_cv_file_magic_test_file=/lib/libc.so
-    ;;
-  siemens)
-    lt_cv_deplibs_check_method=pass_all
-    ;;
-  pc)
-    lt_cv_deplibs_check_method=pass_all
-    ;;
-  esac
-  ;;
-
-tpf*)
-  lt_cv_deplibs_check_method=pass_all
-  ;;
-esac
-
-fi
-{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $lt_cv_deplibs_check_method" >&5
-$as_echo "$lt_cv_deplibs_check_method" >&6; }
-
-file_magic_glob=
-want_nocaseglob=no
-if test "$build" = "$host"; then
-  case $host_os in
-  mingw* | pw32*)
-    if ( shopt | grep nocaseglob ) >/dev/null 2>&1; then
-      want_nocaseglob=yes
-    else
-      file_magic_glob=`echo aAbBcCdDeEfFgGhHiIjJkKlLmMnNoOpPqQrRsStTuUvVwWxXyYzZ | $SED -e "s/\(..\)/s\/[\1]\/[\1]\/g;/g"`
-    fi
-    ;;
-  esac
-fi
-
-file_magic_cmd=$lt_cv_file_magic_cmd
-deplibs_check_method=$lt_cv_deplibs_check_method
-test -z "$deplibs_check_method" && deplibs_check_method=unknown
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-if test -n "$ac_tool_prefix"; then
-  # Extract the first word of "${ac_tool_prefix}dlltool", so it can be a program name with args.
-set dummy ${ac_tool_prefix}dlltool; ac_word=$2
-{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5
-$as_echo_n "checking for $ac_word... " >&6; }
-if ${ac_cv_prog_DLLTOOL+:} false; then :
-  $as_echo_n "(cached) " >&6
-else
-  if test -n "$DLLTOOL"; then
-  ac_cv_prog_DLLTOOL="$DLLTOOL" # Let the user override the test.
-else
-as_save_IFS=$IFS; IFS=$PATH_SEPARATOR
-for as_dir in $PATH
-do
-  IFS=$as_save_IFS
-  test -z "$as_dir" && as_dir=.
-    for ac_exec_ext in '' $ac_executable_extensions; do
-  if { test -f "$as_dir/$ac_word$ac_exec_ext" && $as_test_x "$as_dir/$ac_word$ac_exec_ext"; }; then
-    ac_cv_prog_DLLTOOL="${ac_tool_prefix}dlltool"
-    $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5
-    break 2
-  fi
-done
-  done
-IFS=$as_save_IFS
-
-fi
-fi
-DLLTOOL=$ac_cv_prog_DLLTOOL
-if test -n "$DLLTOOL"; then
-  { $as_echo "$as_me:${as_lineno-$LINENO}: result: $DLLTOOL" >&5
-$as_echo "$DLLTOOL" >&6; }
-else
-  { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5
-$as_echo "no" >&6; }
-fi
-
-
-fi
-if test -z "$ac_cv_prog_DLLTOOL"; then
-  ac_ct_DLLTOOL=$DLLTOOL
-  # Extract the first word of "dlltool", so it can be a program name with args.
-set dummy dlltool; ac_word=$2
-{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5
-$as_echo_n "checking for $ac_word... " >&6; }
-if ${ac_cv_prog_ac_ct_DLLTOOL+:} false; then :
-  $as_echo_n "(cached) " >&6
-else
-  if test -n "$ac_ct_DLLTOOL"; then
-  ac_cv_prog_ac_ct_DLLTOOL="$ac_ct_DLLTOOL" # Let the user override the test.
-else
-as_save_IFS=$IFS; IFS=$PATH_SEPARATOR
-for as_dir in $PATH
-do
-  IFS=$as_save_IFS
-  test -z "$as_dir" && as_dir=.
-    for ac_exec_ext in '' $ac_executable_extensions; do
-  if { test -f "$as_dir/$ac_word$ac_exec_ext" && $as_test_x "$as_dir/$ac_word$ac_exec_ext"; }; then
-    ac_cv_prog_ac_ct_DLLTOOL="dlltool"
-    $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5
-    break 2
-  fi
-done
-  done
-IFS=$as_save_IFS
-
-fi
-fi
-ac_ct_DLLTOOL=$ac_cv_prog_ac_ct_DLLTOOL
-if test -n "$ac_ct_DLLTOOL"; then
-  { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_ct_DLLTOOL" >&5
-$as_echo "$ac_ct_DLLTOOL" >&6; }
-else
-  { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5
-$as_echo "no" >&6; }
-fi
-
-  if test "x$ac_ct_DLLTOOL" = x; then
-    DLLTOOL="false"
-  else
-    case $cross_compiling:$ac_tool_warned in
-yes:)
-{ $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: using cross tools not prefixed with host triplet" >&5
-$as_echo "$as_me: WARNING: using cross tools not prefixed with host triplet" >&2;}
-ac_tool_warned=yes ;;
-esac
-    DLLTOOL=$ac_ct_DLLTOOL
-  fi
-else
-  DLLTOOL="$ac_cv_prog_DLLTOOL"
-fi
-
-test -z "$DLLTOOL" && DLLTOOL=dlltool
-
-
-
-
-
-
-
-
-
-
-{ $as_echo "$as_me:${as_lineno-$LINENO}: checking how to associate runtime and link libraries" >&5
-$as_echo_n "checking how to associate runtime and link libraries... " >&6; }
-if ${lt_cv_sharedlib_from_linklib_cmd+:} false; then :
-  $as_echo_n "(cached) " >&6
-else
-  lt_cv_sharedlib_from_linklib_cmd='unknown'
-
-case $host_os in
-cygwin* | mingw* | pw32* | cegcc*)
-  # two different shell functions defined in ltmain.sh
-  # decide which to use based on capabilities of $DLLTOOL
-  case `$DLLTOOL --help 2>&1` in
-  *--identify-strict*)
-    lt_cv_sharedlib_from_linklib_cmd=func_cygming_dll_for_implib
-    ;;
-  *)
-    lt_cv_sharedlib_from_linklib_cmd=func_cygming_dll_for_implib_fallback
-    ;;
-  esac
-  ;;
-*)
-  # fallback: assume linklib IS sharedlib
-  lt_cv_sharedlib_from_linklib_cmd="$ECHO"
-  ;;
-esac
-
-fi
-{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $lt_cv_sharedlib_from_linklib_cmd" >&5
-$as_echo "$lt_cv_sharedlib_from_linklib_cmd" >&6; }
-sharedlib_from_linklib_cmd=$lt_cv_sharedlib_from_linklib_cmd
-test -z "$sharedlib_from_linklib_cmd" && sharedlib_from_linklib_cmd=$ECHO
-
-
-
-
-
-
-
-if test -n "$ac_tool_prefix"; then
-  for ac_prog in ar
-  do
-    # Extract the first word of "$ac_tool_prefix$ac_prog", so it can be a program name with args.
-set dummy $ac_tool_prefix$ac_prog; ac_word=$2
-{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5
-$as_echo_n "checking for $ac_word... " >&6; }
-if ${ac_cv_prog_AR+:} false; then :
-  $as_echo_n "(cached) " >&6
-else
-  if test -n "$AR"; then
-  ac_cv_prog_AR="$AR" # Let the user override the test.
-else
-as_save_IFS=$IFS; IFS=$PATH_SEPARATOR
-for as_dir in $PATH
-do
-  IFS=$as_save_IFS
-  test -z "$as_dir" && as_dir=.
-    for ac_exec_ext in '' $ac_executable_extensions; do
-  if { test -f "$as_dir/$ac_word$ac_exec_ext" && $as_test_x "$as_dir/$ac_word$ac_exec_ext"; }; then
-    ac_cv_prog_AR="$ac_tool_prefix$ac_prog"
-    $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5
-    break 2
-  fi
-done
-  done
-IFS=$as_save_IFS
-
-fi
-fi
-AR=$ac_cv_prog_AR
-if test -n "$AR"; then
-  { $as_echo "$as_me:${as_lineno-$LINENO}: result: $AR" >&5
-$as_echo "$AR" >&6; }
-else
-  { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5
-$as_echo "no" >&6; }
-fi
-
-
-    test -n "$AR" && break
-  done
-fi
-if test -z "$AR"; then
-  ac_ct_AR=$AR
-  for ac_prog in ar
-do
-  # Extract the first word of "$ac_prog", so it can be a program name with args.
-set dummy $ac_prog; ac_word=$2
-{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5
-$as_echo_n "checking for $ac_word... " >&6; }
-if ${ac_cv_prog_ac_ct_AR+:} false; then :
-  $as_echo_n "(cached) " >&6
-else
-  if test -n "$ac_ct_AR"; then
-  ac_cv_prog_ac_ct_AR="$ac_ct_AR" # Let the user override the test.
-else
-as_save_IFS=$IFS; IFS=$PATH_SEPARATOR
-for as_dir in $PATH
-do
-  IFS=$as_save_IFS
-  test -z "$as_dir" && as_dir=.
-    for ac_exec_ext in '' $ac_executable_extensions; do
-  if { test -f "$as_dir/$ac_word$ac_exec_ext" && $as_test_x "$as_dir/$ac_word$ac_exec_ext"; }; then
-    ac_cv_prog_ac_ct_AR="$ac_prog"
-    $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5
-    break 2
-  fi
-done
-  done
-IFS=$as_save_IFS
-
-fi
-fi
-ac_ct_AR=$ac_cv_prog_ac_ct_AR
-if test -n "$ac_ct_AR"; then
-  { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_ct_AR" >&5
-$as_echo "$ac_ct_AR" >&6; }
-else
-  { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5
-$as_echo "no" >&6; }
-fi
-
-
-  test -n "$ac_ct_AR" && break
-done
-
-  if test "x$ac_ct_AR" = x; then
-    AR="false"
-  else
-    case $cross_compiling:$ac_tool_warned in
-yes:)
-{ $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: using cross tools not prefixed with host triplet" >&5
-$as_echo "$as_me: WARNING: using cross tools not prefixed with host triplet" >&2;}
-ac_tool_warned=yes ;;
-esac
-    AR=$ac_ct_AR
-  fi
-fi
-
-: ${AR=ar}
-: ${AR_FLAGS=cru}
-
-
-
-
-
-
-
-
-
-
-
-{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for archiver @FILE support" >&5
-$as_echo_n "checking for archiver @FILE support... " >&6; }
-if ${lt_cv_ar_at_file+:} false; then :
-  $as_echo_n "(cached) " >&6
-else
-  lt_cv_ar_at_file=no
-   cat confdefs.h - <<_ACEOF >conftest.$ac_ext
-/* end confdefs.h.  */
-
-int
-main ()
-{
-
-  ;
-  return 0;
-}
-_ACEOF
-if ac_fn_c_try_compile "$LINENO"; then :
-  echo conftest.$ac_objext > conftest.lst
-      lt_ar_try='$AR $AR_FLAGS libconftest.a @conftest.lst >&5'
-      { { eval echo "\"\$as_me\":${as_lineno-$LINENO}: \"$lt_ar_try\""; } >&5
-  (eval $lt_ar_try) 2>&5
-  ac_status=$?
-  $as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5
-  test $ac_status = 0; }
-      if test "$ac_status" -eq 0; then
-	# Ensure the archiver fails upon bogus file names.
-	rm -f conftest.$ac_objext libconftest.a
-	{ { eval echo "\"\$as_me\":${as_lineno-$LINENO}: \"$lt_ar_try\""; } >&5
-  (eval $lt_ar_try) 2>&5
-  ac_status=$?
-  $as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5
-  test $ac_status = 0; }
-	if test "$ac_status" -ne 0; then
-          lt_cv_ar_at_file=@
-        fi
-      fi
-      rm -f conftest.* libconftest.a
-
-fi
-rm -f core conftest.err conftest.$ac_objext conftest.$ac_ext
-
-fi
-{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $lt_cv_ar_at_file" >&5
-$as_echo "$lt_cv_ar_at_file" >&6; }
-
-if test "x$lt_cv_ar_at_file" = xno; then
-  archiver_list_spec=
-else
-  archiver_list_spec=$lt_cv_ar_at_file
-fi
-
-
-
-
-
-
-
-if test -n "$ac_tool_prefix"; then
-  # Extract the first word of "${ac_tool_prefix}strip", so it can be a program name with args.
-set dummy ${ac_tool_prefix}strip; ac_word=$2
-{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5
-$as_echo_n "checking for $ac_word... " >&6; }
-if ${ac_cv_prog_STRIP+:} false; then :
-  $as_echo_n "(cached) " >&6
-else
-  if test -n "$STRIP"; then
-  ac_cv_prog_STRIP="$STRIP" # Let the user override the test.
-else
-as_save_IFS=$IFS; IFS=$PATH_SEPARATOR
-for as_dir in $PATH
-do
-  IFS=$as_save_IFS
-  test -z "$as_dir" && as_dir=.
-    for ac_exec_ext in '' $ac_executable_extensions; do
-  if { test -f "$as_dir/$ac_word$ac_exec_ext" && $as_test_x "$as_dir/$ac_word$ac_exec_ext"; }; then
-    ac_cv_prog_STRIP="${ac_tool_prefix}strip"
-    $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5
-    break 2
-  fi
-done
-  done
-IFS=$as_save_IFS
-
-fi
-fi
-STRIP=$ac_cv_prog_STRIP
-if test -n "$STRIP"; then
-  { $as_echo "$as_me:${as_lineno-$LINENO}: result: $STRIP" >&5
-$as_echo "$STRIP" >&6; }
-else
-  { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5
-$as_echo "no" >&6; }
-fi
-
-
-fi
-if test -z "$ac_cv_prog_STRIP"; then
-  ac_ct_STRIP=$STRIP
-  # Extract the first word of "strip", so it can be a program name with args.
-set dummy strip; ac_word=$2
-{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5
-$as_echo_n "checking for $ac_word... " >&6; }
-if ${ac_cv_prog_ac_ct_STRIP+:} false; then :
-  $as_echo_n "(cached) " >&6
-else
-  if test -n "$ac_ct_STRIP"; then
-  ac_cv_prog_ac_ct_STRIP="$ac_ct_STRIP" # Let the user override the test.
-else
-as_save_IFS=$IFS; IFS=$PATH_SEPARATOR
-for as_dir in $PATH
-do
-  IFS=$as_save_IFS
-  test -z "$as_dir" && as_dir=.
-    for ac_exec_ext in '' $ac_executable_extensions; do
-  if { test -f "$as_dir/$ac_word$ac_exec_ext" && $as_test_x "$as_dir/$ac_word$ac_exec_ext"; }; then
-    ac_cv_prog_ac_ct_STRIP="strip"
-    $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5
-    break 2
-  fi
-done
-  done
-IFS=$as_save_IFS
-
-fi
-fi
-ac_ct_STRIP=$ac_cv_prog_ac_ct_STRIP
-if test -n "$ac_ct_STRIP"; then
-  { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_ct_STRIP" >&5
-$as_echo "$ac_ct_STRIP" >&6; }
-else
-  { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5
-$as_echo "no" >&6; }
-fi
-
-  if test "x$ac_ct_STRIP" = x; then
-    STRIP=":"
-  else
-    case $cross_compiling:$ac_tool_warned in
-yes:)
-{ $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: using cross tools not prefixed with host triplet" >&5
-$as_echo "$as_me: WARNING: using cross tools not prefixed with host triplet" >&2;}
-ac_tool_warned=yes ;;
-esac
-    STRIP=$ac_ct_STRIP
-  fi
-else
-  STRIP="$ac_cv_prog_STRIP"
-fi
-
-test -z "$STRIP" && STRIP=:
-
-
-
-
-
-
-if test -n "$ac_tool_prefix"; then
-  # Extract the first word of "${ac_tool_prefix}ranlib", so it can be a program name with args.
-set dummy ${ac_tool_prefix}ranlib; ac_word=$2
-{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5
-$as_echo_n "checking for $ac_word... " >&6; }
-if ${ac_cv_prog_RANLIB+:} false; then :
-  $as_echo_n "(cached) " >&6
-else
-  if test -n "$RANLIB"; then
-  ac_cv_prog_RANLIB="$RANLIB" # Let the user override the test.
-else
-as_save_IFS=$IFS; IFS=$PATH_SEPARATOR
-for as_dir in $PATH
-do
-  IFS=$as_save_IFS
-  test -z "$as_dir" && as_dir=.
-    for ac_exec_ext in '' $ac_executable_extensions; do
-  if { test -f "$as_dir/$ac_word$ac_exec_ext" && $as_test_x "$as_dir/$ac_word$ac_exec_ext"; }; then
-    ac_cv_prog_RANLIB="${ac_tool_prefix}ranlib"
-    $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5
-    break 2
-  fi
-done
-  done
-IFS=$as_save_IFS
-
-fi
-fi
-RANLIB=$ac_cv_prog_RANLIB
-if test -n "$RANLIB"; then
-  { $as_echo "$as_me:${as_lineno-$LINENO}: result: $RANLIB" >&5
-$as_echo "$RANLIB" >&6; }
-else
-  { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5
-$as_echo "no" >&6; }
-fi
-
-
-fi
-if test -z "$ac_cv_prog_RANLIB"; then
-  ac_ct_RANLIB=$RANLIB
-  # Extract the first word of "ranlib", so it can be a program name with args.
-set dummy ranlib; ac_word=$2
-{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5
-$as_echo_n "checking for $ac_word... " >&6; }
-if ${ac_cv_prog_ac_ct_RANLIB+:} false; then :
-  $as_echo_n "(cached) " >&6
-else
-  if test -n "$ac_ct_RANLIB"; then
-  ac_cv_prog_ac_ct_RANLIB="$ac_ct_RANLIB" # Let the user override the test.
-else
-as_save_IFS=$IFS; IFS=$PATH_SEPARATOR
-for as_dir in $PATH
-do
-  IFS=$as_save_IFS
-  test -z "$as_dir" && as_dir=.
-    for ac_exec_ext in '' $ac_executable_extensions; do
-  if { test -f "$as_dir/$ac_word$ac_exec_ext" && $as_test_x "$as_dir/$ac_word$ac_exec_ext"; }; then
-    ac_cv_prog_ac_ct_RANLIB="ranlib"
-    $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5
-    break 2
-  fi
-done
-  done
-IFS=$as_save_IFS
-
-fi
-fi
-ac_ct_RANLIB=$ac_cv_prog_ac_ct_RANLIB
-if test -n "$ac_ct_RANLIB"; then
-  { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_ct_RANLIB" >&5
-$as_echo "$ac_ct_RANLIB" >&6; }
-else
-  { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5
-$as_echo "no" >&6; }
-fi
-
-  if test "x$ac_ct_RANLIB" = x; then
-    RANLIB=":"
-  else
-    case $cross_compiling:$ac_tool_warned in
-yes:)
-{ $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: using cross tools not prefixed with host triplet" >&5
-$as_echo "$as_me: WARNING: using cross tools not prefixed with host triplet" >&2;}
-ac_tool_warned=yes ;;
-esac
-    RANLIB=$ac_ct_RANLIB
-  fi
-else
-  RANLIB="$ac_cv_prog_RANLIB"
-fi
-
-test -z "$RANLIB" && RANLIB=:
-
-
-
-
-
-
-# Determine commands to create old-style static archives.
-old_archive_cmds='$AR $AR_FLAGS $oldlib$oldobjs'
-old_postinstall_cmds='chmod 644 $oldlib'
-old_postuninstall_cmds=
-
-if test -n "$RANLIB"; then
-  case $host_os in
-  openbsd*)
-    old_postinstall_cmds="$old_postinstall_cmds~\$RANLIB -t \$tool_oldlib"
-    ;;
-  *)
-    old_postinstall_cmds="$old_postinstall_cmds~\$RANLIB \$tool_oldlib"
-    ;;
-  esac
-  old_archive_cmds="$old_archive_cmds~\$RANLIB \$tool_oldlib"
-fi
-
-case $host_os in
-  darwin*)
-    lock_old_archive_extraction=yes ;;
-  *)
-    lock_old_archive_extraction=no ;;
-esac
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-# If no C compiler was specified, use CC.
-LTCC=${LTCC-"$CC"}
-
-# If no C compiler flags were specified, use CFLAGS.
-LTCFLAGS=${LTCFLAGS-"$CFLAGS"}
-
-# Allow CC to be a program name with arguments.
-compiler=$CC
-
-
-# Check for command to grab the raw symbol name followed by C symbol from nm.
-{ $as_echo "$as_me:${as_lineno-$LINENO}: checking command to parse $NM output from $compiler object" >&5
-$as_echo_n "checking command to parse $NM output from $compiler object... " >&6; }
-if ${lt_cv_sys_global_symbol_pipe+:} false; then :
-  $as_echo_n "(cached) " >&6
-else
-
-# These are sane defaults that work on at least a few old systems.
-# [They come from Ultrix.  What could be older than Ultrix?!! ;)]
-
-# Character class describing NM global symbol codes.
-symcode='[BCDEGRST]'
-
-# Regexp to match symbols that can be accessed directly from C.
-sympat='\([_A-Za-z][_A-Za-z0-9]*\)'
-
-# Define system-specific variables.
-case $host_os in
-aix*)
-  symcode='[BCDT]'
-  ;;
-cygwin* | mingw* | pw32* | cegcc*)
-  symcode='[ABCDGISTW]'
-  ;;
-hpux*)
-  if test "$host_cpu" = ia64; then
-    symcode='[ABCDEGRST]'
-  fi
-  ;;
-irix* | nonstopux*)
-  symcode='[BCDEGRST]'
-  ;;
-osf*)
-  symcode='[BCDEGQRST]'
-  ;;
-solaris*)
-  symcode='[BDRT]'
-  ;;
-sco3.2v5*)
-  symcode='[DT]'
-  ;;
-sysv4.2uw2*)
-  symcode='[DT]'
-  ;;
-sysv5* | sco5v6* | unixware* | OpenUNIX*)
-  symcode='[ABDT]'
-  ;;
-sysv4)
-  symcode='[DFNSTU]'
-  ;;
-esac
-
-# If we're using GNU nm, then use its standard symbol codes.
-case `$NM -V 2>&1` in
-*GNU* | *'with BFD'*)
-  symcode='[ABCDGIRSTW]' ;;
-esac
-
-# Transform an extracted symbol line into a proper C declaration.
-# Some systems (esp. on ia64) link data and code symbols differently,
-# so use this general approach.
-lt_cv_sys_global_symbol_to_cdecl="sed -n -e 's/^T .* \(.*\)$/extern int \1();/p' -e 's/^$symcode* .* \(.*\)$/extern char \1;/p'"
-
-# Transform an extracted symbol line into symbol name and symbol address
-lt_cv_sys_global_symbol_to_c_name_address="sed -n -e 's/^: \([^ ]*\)[ ]*$/  {\\\"\1\\\", (void *) 0},/p' -e 's/^$symcode* \([^ ]*\) \([^ ]*\)$/  {\"\2\", (void *) \&\2},/p'"
-lt_cv_sys_global_symbol_to_c_name_address_lib_prefix="sed -n -e 's/^: \([^ ]*\)[ ]*$/  {\\\"\1\\\", (void *) 0},/p' -e 's/^$symcode* \([^ ]*\) \(lib[^ ]*\)$/  {\"\2\", (void *) \&\2},/p' -e 's/^$symcode* \([^ ]*\) \([^ ]*\)$/  {\"lib\2\", (void *) \&\2},/p'"
-
-# Handle CRLF in mingw tool chain
-opt_cr=
-case $build_os in
-mingw*)
-  opt_cr=`$ECHO 'x\{0,1\}' | tr x '\015'` # option cr in regexp
-  ;;
-esac
-
-# Try without a prefix underscore, then with it.
-for ac_symprfx in "" "_"; do
-
-  # Transform symcode, sympat, and symprfx into a raw symbol and a C symbol.
-  symxfrm="\\1 $ac_symprfx\\2 \\2"
-
-  # Write the raw and C identifiers.
-  if test "$lt_cv_nm_interface" = "MS dumpbin"; then
-    # Fake it for dumpbin and say T for any non-static function
-    # and D for any global variable.
-    # Also find C++ and __fastcall symbols from MSVC++,
-    # which start with @ or ?.
-    lt_cv_sys_global_symbol_pipe="$AWK '"\
-"     {last_section=section; section=\$ 3};"\
-"     /^COFF SYMBOL TABLE/{for(i in hide) delete hide[i]};"\
-"     /Section length .*#relocs.*(pick any)/{hide[last_section]=1};"\
-"     \$ 0!~/External *\|/{next};"\
-"     / 0+ UNDEF /{next}; / UNDEF \([^|]\)*()/{next};"\
-"     {if(hide[section]) next};"\
-"     {f=0}; \$ 0~/\(\).*\|/{f=1}; {printf f ? \"T \" : \"D \"};"\
-"     {split(\$ 0, a, /\||\r/); split(a[2], s)};"\
-"     s[1]~/^[@?]/{print s[1], s[1]; next};"\
-"     s[1]~prfx {split(s[1],t,\"@\"); print t[1], substr(t[1],length(prfx))}"\
-"     ' prfx=^$ac_symprfx"
-  else
-    lt_cv_sys_global_symbol_pipe="sed -n -e 's/^.*[	 ]\($symcode$symcode*\)[	 ][	 ]*$ac_symprfx$sympat$opt_cr$/$symxfrm/p'"
-  fi
-  lt_cv_sys_global_symbol_pipe="$lt_cv_sys_global_symbol_pipe | sed '/ __gnu_lto/d'"
-
-  # Check to see that the pipe works correctly.
-  pipe_works=no
-
-  rm -f conftest*
-  cat > conftest.$ac_ext <<_LT_EOF
-#ifdef __cplusplus
-extern "C" {
-#endif
-char nm_test_var;
-void nm_test_func(void);
-void nm_test_func(void){}
-#ifdef __cplusplus
-}
-#endif
-int main(){nm_test_var='a';nm_test_func();return(0);}
-_LT_EOF
-
-  if { { eval echo "\"\$as_me\":${as_lineno-$LINENO}: \"$ac_compile\""; } >&5
-  (eval $ac_compile) 2>&5
-  ac_status=$?
-  $as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5
-  test $ac_status = 0; }; then
-    # Now try to grab the symbols.
-    nlist=conftest.nm
-    if { { eval echo "\"\$as_me\":${as_lineno-$LINENO}: \"$NM conftest.$ac_objext \| "$lt_cv_sys_global_symbol_pipe" \> $nlist\""; } >&5
-  (eval $NM conftest.$ac_objext \| "$lt_cv_sys_global_symbol_pipe" \> $nlist) 2>&5
-  ac_status=$?
-  $as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5
-  test $ac_status = 0; } && test -s "$nlist"; then
-      # Try sorting and uniquifying the output.
-      if sort "$nlist" | uniq > "$nlist"T; then
-	mv -f "$nlist"T "$nlist"
-      else
-	rm -f "$nlist"T
-      fi
-
-      # Make sure that we snagged all the symbols we need.
-      if $GREP ' nm_test_var$' "$nlist" >/dev/null; then
-	if $GREP ' nm_test_func$' "$nlist" >/dev/null; then
-	  cat <<_LT_EOF > conftest.$ac_ext
-/* Keep this code in sync between libtool.m4, ltmain, lt_system.h, and tests.  */
-#if defined(_WIN32) || defined(__CYGWIN__) || defined(_WIN32_WCE)
-/* DATA imports from DLLs on WIN32 can't be const, because runtime
-   relocations are performed -- see ld's documentation on pseudo-relocs.  */
-# define LT_DLSYM_CONST
-#elif defined(__osf__)
-/* This system does not cope well with relocations in const data.  */
-# define LT_DLSYM_CONST
-#else
-# define LT_DLSYM_CONST const
-#endif
-
-#ifdef __cplusplus
-extern "C" {
-#endif
-
-_LT_EOF
-	  # Now generate the symbol file.
-	  eval "$lt_cv_sys_global_symbol_to_cdecl"' < "$nlist" | $GREP -v main >> conftest.$ac_ext'
-
-	  cat <<_LT_EOF >> conftest.$ac_ext
-
-/* The mapping between symbol names and symbols.  */
-LT_DLSYM_CONST struct {
-  const char *name;
-  void       *address;
-}
-lt__PROGRAM__LTX_preloaded_symbols[] =
-{
-  { "@PROGRAM@", (void *) 0 },
-_LT_EOF
-	  $SED "s/^$symcode$symcode* \(.*\) \(.*\)$/  {\"\2\", (void *) \&\2},/" < "$nlist" | $GREP -v main >> conftest.$ac_ext
-	  cat <<\_LT_EOF >> conftest.$ac_ext
-  {0, (void *) 0}
-};
-
-/* This works around a problem in the FreeBSD linker */
-#ifdef FREEBSD_WORKAROUND
-static const void *lt_preloaded_setup() {
-  return lt__PROGRAM__LTX_preloaded_symbols;
-}
-#endif
-
-#ifdef __cplusplus
-}
-#endif
-_LT_EOF
-	  # Now try linking the two files.
-	  mv conftest.$ac_objext conftstm.$ac_objext
-	  lt_globsym_save_LIBS=$LIBS
-	  lt_globsym_save_CFLAGS=$CFLAGS
-	  LIBS="conftstm.$ac_objext"
-	  CFLAGS="$CFLAGS$lt_prog_compiler_no_builtin_flag"
-	  if { { eval echo "\"\$as_me\":${as_lineno-$LINENO}: \"$ac_link\""; } >&5
-  (eval $ac_link) 2>&5
-  ac_status=$?
-  $as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5
-  test $ac_status = 0; } && test -s conftest${ac_exeext}; then
-	    pipe_works=yes
-	  fi
-	  LIBS=$lt_globsym_save_LIBS
-	  CFLAGS=$lt_globsym_save_CFLAGS
-	else
-	  echo "cannot find nm_test_func in $nlist" >&5
-	fi
-      else
-	echo "cannot find nm_test_var in $nlist" >&5
-      fi
-    else
-      echo "cannot run $lt_cv_sys_global_symbol_pipe" >&5
-    fi
-  else
-    echo "$progname: failed program was:" >&5
-    cat conftest.$ac_ext >&5
-  fi
-  rm -rf conftest* conftst*
-
-  # Do not use the global_symbol_pipe unless it works.
-  if test "$pipe_works" = yes; then
-    break
-  else
-    lt_cv_sys_global_symbol_pipe=
-  fi
-done
-
-fi
-
-if test -z "$lt_cv_sys_global_symbol_pipe"; then
-  lt_cv_sys_global_symbol_to_cdecl=
-fi
-if test -z "$lt_cv_sys_global_symbol_pipe$lt_cv_sys_global_symbol_to_cdecl"; then
-  { $as_echo "$as_me:${as_lineno-$LINENO}: result: failed" >&5
-$as_echo "failed" >&6; }
-else
-  { $as_echo "$as_me:${as_lineno-$LINENO}: result: ok" >&5
-$as_echo "ok" >&6; }
-fi
-
-# Response file support.
-if test "$lt_cv_nm_interface" = "MS dumpbin"; then
-  nm_file_list_spec='@'
-elif $NM --help 2>/dev/null | grep '[@]FILE' >/dev/null; then
-  nm_file_list_spec='@'
-fi
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for sysroot" >&5
-$as_echo_n "checking for sysroot... " >&6; }
-
-# Check whether --with-sysroot was given.
-if test "${with_sysroot+set}" = set; then :
-  withval=$with_sysroot;
-else
-  with_sysroot=no
-fi
-
-
-lt_sysroot=
-case ${with_sysroot} in #(
- yes)
-   if test "$GCC" = yes; then
-     lt_sysroot=`$CC --print-sysroot 2>/dev/null`
-   fi
-   ;; #(
- /*)
-   lt_sysroot=`echo "$with_sysroot" | sed -e "$sed_quote_subst"`
-   ;; #(
- no|'')
-   ;; #(
- *)
-   { $as_echo "$as_me:${as_lineno-$LINENO}: result: ${with_sysroot}" >&5
-$as_echo "${with_sysroot}" >&6; }
-   as_fn_error $? "The sysroot must be an absolute path." "$LINENO" 5
-   ;;
-esac
-
- { $as_echo "$as_me:${as_lineno-$LINENO}: result: ${lt_sysroot:-no}" >&5
-$as_echo "${lt_sysroot:-no}" >&6; }
-
-
-
-
-
-# Check whether --enable-libtool-lock was given.
-if test "${enable_libtool_lock+set}" = set; then :
-  enableval=$enable_libtool_lock;
-fi
-
-test "x$enable_libtool_lock" != xno && enable_libtool_lock=yes
-
-# Some flags need to be propagated to the compiler or linker for good
-# libtool support.
-case $host in
-ia64-*-hpux*)
-  # Find out which ABI we are using.
-  echo 'int i;' > conftest.$ac_ext
-  if { { eval echo "\"\$as_me\":${as_lineno-$LINENO}: \"$ac_compile\""; } >&5
-  (eval $ac_compile) 2>&5
-  ac_status=$?
-  $as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5
-  test $ac_status = 0; }; then
-    case `/usr/bin/file conftest.$ac_objext` in
-      *ELF-32*)
-	HPUX_IA64_MODE="32"
-	;;
-      *ELF-64*)
-	HPUX_IA64_MODE="64"
-	;;
-    esac
-  fi
-  rm -rf conftest*
-  ;;
-*-*-irix6*)
-  # Find out which ABI we are using.
-  echo '#line '$LINENO' "configure"' > conftest.$ac_ext
-  if { { eval echo "\"\$as_me\":${as_lineno-$LINENO}: \"$ac_compile\""; } >&5
-  (eval $ac_compile) 2>&5
-  ac_status=$?
-  $as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5
-  test $ac_status = 0; }; then
-    if test "$lt_cv_prog_gnu_ld" = yes; then
-      case `/usr/bin/file conftest.$ac_objext` in
-	*32-bit*)
-	  LD="${LD-ld} -melf32bsmip"
-	  ;;
-	*N32*)
-	  LD="${LD-ld} -melf32bmipn32"
-	  ;;
-	*64-bit*)
-	  LD="${LD-ld} -melf64bmip"
-	;;
-      esac
-    else
-      case `/usr/bin/file conftest.$ac_objext` in
-	*32-bit*)
-	  LD="${LD-ld} -32"
-	  ;;
-	*N32*)
-	  LD="${LD-ld} -n32"
-	  ;;
-	*64-bit*)
-	  LD="${LD-ld} -64"
-	  ;;
-      esac
-    fi
-  fi
-  rm -rf conftest*
-  ;;
-
-x86_64-*kfreebsd*-gnu|x86_64-*linux*|ppc*-*linux*|powerpc*-*linux*| \
-s390*-*linux*|s390*-*tpf*|sparc*-*linux*)
-  # Find out which ABI we are using.
-  echo 'int i;' > conftest.$ac_ext
-  if { { eval echo "\"\$as_me\":${as_lineno-$LINENO}: \"$ac_compile\""; } >&5
-  (eval $ac_compile) 2>&5
-  ac_status=$?
-  $as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5
-  test $ac_status = 0; }; then
-    case `/usr/bin/file conftest.o` in
-      *32-bit*)
-	case $host in
-	  x86_64-*kfreebsd*-gnu)
-	    LD="${LD-ld} -m elf_i386_fbsd"
-	    ;;
-	  x86_64-*linux*)
-	    LD="${LD-ld} -m elf_i386"
-	    ;;
-	  ppc64-*linux*|powerpc64-*linux*)
-	    LD="${LD-ld} -m elf32ppclinux"
-	    ;;
-	  s390x-*linux*)
-	    LD="${LD-ld} -m elf_s390"
-	    ;;
-	  sparc64-*linux*)
-	    LD="${LD-ld} -m elf32_sparc"
-	    ;;
-	esac
-	;;
-      *64-bit*)
-	case $host in
-	  x86_64-*kfreebsd*-gnu)
-	    LD="${LD-ld} -m elf_x86_64_fbsd"
-	    ;;
-	  x86_64-*linux*)
-	    LD="${LD-ld} -m elf_x86_64"
-	    ;;
-	  ppc*-*linux*|powerpc*-*linux*)
-	    LD="${LD-ld} -m elf64ppc"
-	    ;;
-	  s390*-*linux*|s390*-*tpf*)
-	    LD="${LD-ld} -m elf64_s390"
-	    ;;
-	  sparc*-*linux*)
-	    LD="${LD-ld} -m elf64_sparc"
-	    ;;
-	esac
-	;;
-    esac
-  fi
-  rm -rf conftest*
-  ;;
-
-*-*-sco3.2v5*)
-  # On SCO OpenServer 5, we need -belf to get full-featured binaries.
-  SAVE_CFLAGS="$CFLAGS"
-  CFLAGS="$CFLAGS -belf"
-  { $as_echo "$as_me:${as_lineno-$LINENO}: checking whether the C compiler needs -belf" >&5
-$as_echo_n "checking whether the C compiler needs -belf... " >&6; }
-if ${lt_cv_cc_needs_belf+:} false; then :
-  $as_echo_n "(cached) " >&6
-else
-  ac_ext=c
-ac_cpp='$CPP $CPPFLAGS'
-ac_compile='$CC -c $CFLAGS $CPPFLAGS conftest.$ac_ext >&5'
-ac_link='$CC -o conftest$ac_exeext $CFLAGS $CPPFLAGS $LDFLAGS conftest.$ac_ext $LIBS >&5'
-ac_compiler_gnu=$ac_cv_c_compiler_gnu
-
-     cat confdefs.h - <<_ACEOF >conftest.$ac_ext
-/* end confdefs.h.  */
-
-int
-main ()
-{
-
-  ;
-  return 0;
-}
-_ACEOF
-if ac_fn_c_try_link "$LINENO"; then :
-  lt_cv_cc_needs_belf=yes
-else
-  lt_cv_cc_needs_belf=no
-fi
-rm -f core conftest.err conftest.$ac_objext \
-    conftest$ac_exeext conftest.$ac_ext
-     ac_ext=c
-ac_cpp='$CPP $CPPFLAGS'
-ac_compile='$CC -c $CFLAGS $CPPFLAGS conftest.$ac_ext >&5'
-ac_link='$CC -o conftest$ac_exeext $CFLAGS $CPPFLAGS $LDFLAGS conftest.$ac_ext $LIBS >&5'
-ac_compiler_gnu=$ac_cv_c_compiler_gnu
-
-fi
-{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $lt_cv_cc_needs_belf" >&5
-$as_echo "$lt_cv_cc_needs_belf" >&6; }
-  if test x"$lt_cv_cc_needs_belf" != x"yes"; then
-    # this is probably gcc 2.8.0, egcs 1.0 or newer; no need for -belf
-    CFLAGS="$SAVE_CFLAGS"
-  fi
-  ;;
-*-*solaris*)
-  # Find out which ABI we are using.
-  echo 'int i;' > conftest.$ac_ext
-  if { { eval echo "\"\$as_me\":${as_lineno-$LINENO}: \"$ac_compile\""; } >&5
-  (eval $ac_compile) 2>&5
-  ac_status=$?
-  $as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5
-  test $ac_status = 0; }; then
-    case `/usr/bin/file conftest.o` in
-    *64-bit*)
-      case $lt_cv_prog_gnu_ld in
-      yes*)
-        case $host in
-        i?86-*-solaris*)
-          LD="${LD-ld} -m elf_x86_64"
-          ;;
-        sparc*-*-solaris*)
-          LD="${LD-ld} -m elf64_sparc"
-          ;;
-        esac
-        # GNU ld 2.21 introduced _sol2 emulations.  Use them if available.
-        if ${LD-ld} -V | grep _sol2 >/dev/null 2>&1; then
-          LD="${LD-ld}_sol2"
-        fi
-        ;;
-      *)
-	if ${LD-ld} -64 -r -o conftest2.o conftest.o >/dev/null 2>&1; then
-	  LD="${LD-ld} -64"
-	fi
-	;;
-      esac
-      ;;
-    esac
-  fi
-  rm -rf conftest*
-  ;;
-esac
-
-need_locks="$enable_libtool_lock"
-
-if test -n "$ac_tool_prefix"; then
-  # Extract the first word of "${ac_tool_prefix}mt", so it can be a program name with args.
-set dummy ${ac_tool_prefix}mt; ac_word=$2
-{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5
-$as_echo_n "checking for $ac_word... " >&6; }
-if ${ac_cv_prog_MANIFEST_TOOL+:} false; then :
-  $as_echo_n "(cached) " >&6
-else
-  if test -n "$MANIFEST_TOOL"; then
-  ac_cv_prog_MANIFEST_TOOL="$MANIFEST_TOOL" # Let the user override the test.
-else
-as_save_IFS=$IFS; IFS=$PATH_SEPARATOR
-for as_dir in $PATH
-do
-  IFS=$as_save_IFS
-  test -z "$as_dir" && as_dir=.
-    for ac_exec_ext in '' $ac_executable_extensions; do
-  if { test -f "$as_dir/$ac_word$ac_exec_ext" && $as_test_x "$as_dir/$ac_word$ac_exec_ext"; }; then
-    ac_cv_prog_MANIFEST_TOOL="${ac_tool_prefix}mt"
-    $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5
-    break 2
-  fi
-done
-  done
-IFS=$as_save_IFS
-
-fi
-fi
-MANIFEST_TOOL=$ac_cv_prog_MANIFEST_TOOL
-if test -n "$MANIFEST_TOOL"; then
-  { $as_echo "$as_me:${as_lineno-$LINENO}: result: $MANIFEST_TOOL" >&5
-$as_echo "$MANIFEST_TOOL" >&6; }
-else
-  { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5
-$as_echo "no" >&6; }
-fi
-
-
-fi
-if test -z "$ac_cv_prog_MANIFEST_TOOL"; then
-  ac_ct_MANIFEST_TOOL=$MANIFEST_TOOL
-  # Extract the first word of "mt", so it can be a program name with args.
-set dummy mt; ac_word=$2
-{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5
-$as_echo_n "checking for $ac_word... " >&6; }
-if ${ac_cv_prog_ac_ct_MANIFEST_TOOL+:} false; then :
-  $as_echo_n "(cached) " >&6
-else
-  if test -n "$ac_ct_MANIFEST_TOOL"; then
-  ac_cv_prog_ac_ct_MANIFEST_TOOL="$ac_ct_MANIFEST_TOOL" # Let the user override the test.
-else
-as_save_IFS=$IFS; IFS=$PATH_SEPARATOR
-for as_dir in $PATH
-do
-  IFS=$as_save_IFS
-  test -z "$as_dir" && as_dir=.
-    for ac_exec_ext in '' $ac_executable_extensions; do
-  if { test -f "$as_dir/$ac_word$ac_exec_ext" && $as_test_x "$as_dir/$ac_word$ac_exec_ext"; }; then
-    ac_cv_prog_ac_ct_MANIFEST_TOOL="mt"
-    $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5
-    break 2
-  fi
-done
-  done
-IFS=$as_save_IFS
-
-fi
-fi
-ac_ct_MANIFEST_TOOL=$ac_cv_prog_ac_ct_MANIFEST_TOOL
-if test -n "$ac_ct_MANIFEST_TOOL"; then
-  { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_ct_MANIFEST_TOOL" >&5
-$as_echo "$ac_ct_MANIFEST_TOOL" >&6; }
-else
-  { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5
-$as_echo "no" >&6; }
-fi
-
-  if test "x$ac_ct_MANIFEST_TOOL" = x; then
-    MANIFEST_TOOL=":"
-  else
-    case $cross_compiling:$ac_tool_warned in
-yes:)
-{ $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: using cross tools not prefixed with host triplet" >&5
-$as_echo "$as_me: WARNING: using cross tools not prefixed with host triplet" >&2;}
-ac_tool_warned=yes ;;
-esac
-    MANIFEST_TOOL=$ac_ct_MANIFEST_TOOL
-  fi
-else
-  MANIFEST_TOOL="$ac_cv_prog_MANIFEST_TOOL"
-fi
-
-test -z "$MANIFEST_TOOL" && MANIFEST_TOOL=mt
-{ $as_echo "$as_me:${as_lineno-$LINENO}: checking if $MANIFEST_TOOL is a manifest tool" >&5
-$as_echo_n "checking if $MANIFEST_TOOL is a manifest tool... " >&6; }
-if ${lt_cv_path_mainfest_tool+:} false; then :
-  $as_echo_n "(cached) " >&6
-else
-  lt_cv_path_mainfest_tool=no
-  echo "$as_me:$LINENO: $MANIFEST_TOOL '-?'" >&5
-  $MANIFEST_TOOL '-?' 2>conftest.err > conftest.out
-  cat conftest.err >&5
-  if $GREP 'Manifest Tool' conftest.out > /dev/null; then
-    lt_cv_path_mainfest_tool=yes
-  fi
-  rm -f conftest*
-fi
-{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $lt_cv_path_mainfest_tool" >&5
-$as_echo "$lt_cv_path_mainfest_tool" >&6; }
-if test "x$lt_cv_path_mainfest_tool" != xyes; then
-  MANIFEST_TOOL=:
-fi
-
-
-
-
-
-
-  case $host_os in
-    rhapsody* | darwin*)
-    if test -n "$ac_tool_prefix"; then
-  # Extract the first word of "${ac_tool_prefix}dsymutil", so it can be a program name with args.
-set dummy ${ac_tool_prefix}dsymutil; ac_word=$2
-{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5
-$as_echo_n "checking for $ac_word... " >&6; }
-if ${ac_cv_prog_DSYMUTIL+:} false; then :
-  $as_echo_n "(cached) " >&6
-else
-  if test -n "$DSYMUTIL"; then
-  ac_cv_prog_DSYMUTIL="$DSYMUTIL" # Let the user override the test.
-else
-as_save_IFS=$IFS; IFS=$PATH_SEPARATOR
-for as_dir in $PATH
-do
-  IFS=$as_save_IFS
-  test -z "$as_dir" && as_dir=.
-    for ac_exec_ext in '' $ac_executable_extensions; do
-  if { test -f "$as_dir/$ac_word$ac_exec_ext" && $as_test_x "$as_dir/$ac_word$ac_exec_ext"; }; then
-    ac_cv_prog_DSYMUTIL="${ac_tool_prefix}dsymutil"
-    $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5
-    break 2
-  fi
-done
-  done
-IFS=$as_save_IFS
-
-fi
-fi
-DSYMUTIL=$ac_cv_prog_DSYMUTIL
-if test -n "$DSYMUTIL"; then
-  { $as_echo "$as_me:${as_lineno-$LINENO}: result: $DSYMUTIL" >&5
-$as_echo "$DSYMUTIL" >&6; }
-else
-  { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5
-$as_echo "no" >&6; }
-fi
-
-
-fi
-if test -z "$ac_cv_prog_DSYMUTIL"; then
-  ac_ct_DSYMUTIL=$DSYMUTIL
-  # Extract the first word of "dsymutil", so it can be a program name with args.
-set dummy dsymutil; ac_word=$2
-{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5
-$as_echo_n "checking for $ac_word... " >&6; }
-if ${ac_cv_prog_ac_ct_DSYMUTIL+:} false; then :
-  $as_echo_n "(cached) " >&6
-else
-  if test -n "$ac_ct_DSYMUTIL"; then
-  ac_cv_prog_ac_ct_DSYMUTIL="$ac_ct_DSYMUTIL" # Let the user override the test.
-else
-as_save_IFS=$IFS; IFS=$PATH_SEPARATOR
-for as_dir in $PATH
-do
-  IFS=$as_save_IFS
-  test -z "$as_dir" && as_dir=.
-    for ac_exec_ext in '' $ac_executable_extensions; do
-  if { test -f "$as_dir/$ac_word$ac_exec_ext" && $as_test_x "$as_dir/$ac_word$ac_exec_ext"; }; then
-    ac_cv_prog_ac_ct_DSYMUTIL="dsymutil"
-    $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5
-    break 2
-  fi
-done
-  done
-IFS=$as_save_IFS
-
-fi
-fi
-ac_ct_DSYMUTIL=$ac_cv_prog_ac_ct_DSYMUTIL
-if test -n "$ac_ct_DSYMUTIL"; then
-  { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_ct_DSYMUTIL" >&5
-$as_echo "$ac_ct_DSYMUTIL" >&6; }
-else
-  { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5
-$as_echo "no" >&6; }
-fi
-
-  if test "x$ac_ct_DSYMUTIL" = x; then
-    DSYMUTIL=":"
-  else
-    case $cross_compiling:$ac_tool_warned in
-yes:)
-{ $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: using cross tools not prefixed with host triplet" >&5
-$as_echo "$as_me: WARNING: using cross tools not prefixed with host triplet" >&2;}
-ac_tool_warned=yes ;;
-esac
-    DSYMUTIL=$ac_ct_DSYMUTIL
-  fi
-else
-  DSYMUTIL="$ac_cv_prog_DSYMUTIL"
-fi
-
-    if test -n "$ac_tool_prefix"; then
-  # Extract the first word of "${ac_tool_prefix}nmedit", so it can be a program name with args.
-set dummy ${ac_tool_prefix}nmedit; ac_word=$2
-{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5
-$as_echo_n "checking for $ac_word... " >&6; }
-if ${ac_cv_prog_NMEDIT+:} false; then :
-  $as_echo_n "(cached) " >&6
-else
-  if test -n "$NMEDIT"; then
-  ac_cv_prog_NMEDIT="$NMEDIT" # Let the user override the test.
-else
-as_save_IFS=$IFS; IFS=$PATH_SEPARATOR
-for as_dir in $PATH
-do
-  IFS=$as_save_IFS
-  test -z "$as_dir" && as_dir=.
-    for ac_exec_ext in '' $ac_executable_extensions; do
-  if { test -f "$as_dir/$ac_word$ac_exec_ext" && $as_test_x "$as_dir/$ac_word$ac_exec_ext"; }; then
-    ac_cv_prog_NMEDIT="${ac_tool_prefix}nmedit"
-    $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5
-    break 2
-  fi
-done
-  done
-IFS=$as_save_IFS
-
-fi
-fi
-NMEDIT=$ac_cv_prog_NMEDIT
-if test -n "$NMEDIT"; then
-  { $as_echo "$as_me:${as_lineno-$LINENO}: result: $NMEDIT" >&5
-$as_echo "$NMEDIT" >&6; }
-else
-  { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5
-$as_echo "no" >&6; }
-fi
-
-
-fi
-if test -z "$ac_cv_prog_NMEDIT"; then
-  ac_ct_NMEDIT=$NMEDIT
-  # Extract the first word of "nmedit", so it can be a program name with args.
-set dummy nmedit; ac_word=$2
-{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5
-$as_echo_n "checking for $ac_word... " >&6; }
-if ${ac_cv_prog_ac_ct_NMEDIT+:} false; then :
-  $as_echo_n "(cached) " >&6
-else
-  if test -n "$ac_ct_NMEDIT"; then
-  ac_cv_prog_ac_ct_NMEDIT="$ac_ct_NMEDIT" # Let the user override the test.
-else
-as_save_IFS=$IFS; IFS=$PATH_SEPARATOR
-for as_dir in $PATH
-do
-  IFS=$as_save_IFS
-  test -z "$as_dir" && as_dir=.
-    for ac_exec_ext in '' $ac_executable_extensions; do
-  if { test -f "$as_dir/$ac_word$ac_exec_ext" && $as_test_x "$as_dir/$ac_word$ac_exec_ext"; }; then
-    ac_cv_prog_ac_ct_NMEDIT="nmedit"
-    $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5
-    break 2
-  fi
-done
-  done
-IFS=$as_save_IFS
-
-fi
-fi
-ac_ct_NMEDIT=$ac_cv_prog_ac_ct_NMEDIT
-if test -n "$ac_ct_NMEDIT"; then
-  { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_ct_NMEDIT" >&5
-$as_echo "$ac_ct_NMEDIT" >&6; }
-else
-  { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5
-$as_echo "no" >&6; }
-fi
-
-  if test "x$ac_ct_NMEDIT" = x; then
-    NMEDIT=":"
-  else
-    case $cross_compiling:$ac_tool_warned in
-yes:)
-{ $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: using cross tools not prefixed with host triplet" >&5
-$as_echo "$as_me: WARNING: using cross tools not prefixed with host triplet" >&2;}
-ac_tool_warned=yes ;;
-esac
-    NMEDIT=$ac_ct_NMEDIT
-  fi
-else
-  NMEDIT="$ac_cv_prog_NMEDIT"
-fi
-
-    if test -n "$ac_tool_prefix"; then
-  # Extract the first word of "${ac_tool_prefix}lipo", so it can be a program name with args.
-set dummy ${ac_tool_prefix}lipo; ac_word=$2
-{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5
-$as_echo_n "checking for $ac_word... " >&6; }
-if ${ac_cv_prog_LIPO+:} false; then :
-  $as_echo_n "(cached) " >&6
-else
-  if test -n "$LIPO"; then
-  ac_cv_prog_LIPO="$LIPO" # Let the user override the test.
-else
-as_save_IFS=$IFS; IFS=$PATH_SEPARATOR
-for as_dir in $PATH
-do
-  IFS=$as_save_IFS
-  test -z "$as_dir" && as_dir=.
-    for ac_exec_ext in '' $ac_executable_extensions; do
-  if { test -f "$as_dir/$ac_word$ac_exec_ext" && $as_test_x "$as_dir/$ac_word$ac_exec_ext"; }; then
-    ac_cv_prog_LIPO="${ac_tool_prefix}lipo"
-    $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5
-    break 2
-  fi
-done
-  done
-IFS=$as_save_IFS
-
-fi
-fi
-LIPO=$ac_cv_prog_LIPO
-if test -n "$LIPO"; then
-  { $as_echo "$as_me:${as_lineno-$LINENO}: result: $LIPO" >&5
-$as_echo "$LIPO" >&6; }
-else
-  { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5
-$as_echo "no" >&6; }
-fi
-
-
-fi
-if test -z "$ac_cv_prog_LIPO"; then
-  ac_ct_LIPO=$LIPO
-  # Extract the first word of "lipo", so it can be a program name with args.
-set dummy lipo; ac_word=$2
-{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5
-$as_echo_n "checking for $ac_word... " >&6; }
-if ${ac_cv_prog_ac_ct_LIPO+:} false; then :
-  $as_echo_n "(cached) " >&6
-else
-  if test -n "$ac_ct_LIPO"; then
-  ac_cv_prog_ac_ct_LIPO="$ac_ct_LIPO" # Let the user override the test.
-else
-as_save_IFS=$IFS; IFS=$PATH_SEPARATOR
-for as_dir in $PATH
-do
-  IFS=$as_save_IFS
-  test -z "$as_dir" && as_dir=.
-    for ac_exec_ext in '' $ac_executable_extensions; do
-  if { test -f "$as_dir/$ac_word$ac_exec_ext" && $as_test_x "$as_dir/$ac_word$ac_exec_ext"; }; then
-    ac_cv_prog_ac_ct_LIPO="lipo"
-    $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5
-    break 2
-  fi
-done
-  done
-IFS=$as_save_IFS
-
-fi
-fi
-ac_ct_LIPO=$ac_cv_prog_ac_ct_LIPO
-if test -n "$ac_ct_LIPO"; then
-  { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_ct_LIPO" >&5
-$as_echo "$ac_ct_LIPO" >&6; }
-else
-  { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5
-$as_echo "no" >&6; }
-fi
-
-  if test "x$ac_ct_LIPO" = x; then
-    LIPO=":"
-  else
-    case $cross_compiling:$ac_tool_warned in
-yes:)
-{ $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: using cross tools not prefixed with host triplet" >&5
-$as_echo "$as_me: WARNING: using cross tools not prefixed with host triplet" >&2;}
-ac_tool_warned=yes ;;
-esac
-    LIPO=$ac_ct_LIPO
-  fi
-else
-  LIPO="$ac_cv_prog_LIPO"
-fi
-
-    if test -n "$ac_tool_prefix"; then
-  # Extract the first word of "${ac_tool_prefix}otool", so it can be a program name with args.
-set dummy ${ac_tool_prefix}otool; ac_word=$2
-{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5
-$as_echo_n "checking for $ac_word... " >&6; }
-if ${ac_cv_prog_OTOOL+:} false; then :
-  $as_echo_n "(cached) " >&6
-else
-  if test -n "$OTOOL"; then
-  ac_cv_prog_OTOOL="$OTOOL" # Let the user override the test.
-else
-as_save_IFS=$IFS; IFS=$PATH_SEPARATOR
-for as_dir in $PATH
-do
-  IFS=$as_save_IFS
-  test -z "$as_dir" && as_dir=.
-    for ac_exec_ext in '' $ac_executable_extensions; do
-  if { test -f "$as_dir/$ac_word$ac_exec_ext" && $as_test_x "$as_dir/$ac_word$ac_exec_ext"; }; then
-    ac_cv_prog_OTOOL="${ac_tool_prefix}otool"
-    $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5
-    break 2
-  fi
-done
-  done
-IFS=$as_save_IFS
-
-fi
-fi
-OTOOL=$ac_cv_prog_OTOOL
-if test -n "$OTOOL"; then
-  { $as_echo "$as_me:${as_lineno-$LINENO}: result: $OTOOL" >&5
-$as_echo "$OTOOL" >&6; }
-else
-  { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5
-$as_echo "no" >&6; }
-fi
-
-
-fi
-if test -z "$ac_cv_prog_OTOOL"; then
-  ac_ct_OTOOL=$OTOOL
-  # Extract the first word of "otool", so it can be a program name with args.
-set dummy otool; ac_word=$2
-{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5
-$as_echo_n "checking for $ac_word... " >&6; }
-if ${ac_cv_prog_ac_ct_OTOOL+:} false; then :
-  $as_echo_n "(cached) " >&6
-else
-  if test -n "$ac_ct_OTOOL"; then
-  ac_cv_prog_ac_ct_OTOOL="$ac_ct_OTOOL" # Let the user override the test.
-else
-as_save_IFS=$IFS; IFS=$PATH_SEPARATOR
-for as_dir in $PATH
-do
-  IFS=$as_save_IFS
-  test -z "$as_dir" && as_dir=.
-    for ac_exec_ext in '' $ac_executable_extensions; do
-  if { test -f "$as_dir/$ac_word$ac_exec_ext" && $as_test_x "$as_dir/$ac_word$ac_exec_ext"; }; then
-    ac_cv_prog_ac_ct_OTOOL="otool"
-    $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5
-    break 2
-  fi
-done
-  done
-IFS=$as_save_IFS
-
-fi
-fi
-ac_ct_OTOOL=$ac_cv_prog_ac_ct_OTOOL
-if test -n "$ac_ct_OTOOL"; then
-  { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_ct_OTOOL" >&5
-$as_echo "$ac_ct_OTOOL" >&6; }
-else
-  { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5
-$as_echo "no" >&6; }
-fi
-
-  if test "x$ac_ct_OTOOL" = x; then
-    OTOOL=":"
-  else
-    case $cross_compiling:$ac_tool_warned in
-yes:)
-{ $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: using cross tools not prefixed with host triplet" >&5
-$as_echo "$as_me: WARNING: using cross tools not prefixed with host triplet" >&2;}
-ac_tool_warned=yes ;;
-esac
-    OTOOL=$ac_ct_OTOOL
-  fi
-else
-  OTOOL="$ac_cv_prog_OTOOL"
-fi
-
-    if test -n "$ac_tool_prefix"; then
-  # Extract the first word of "${ac_tool_prefix}otool64", so it can be a program name with args.
-set dummy ${ac_tool_prefix}otool64; ac_word=$2
-{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5
-$as_echo_n "checking for $ac_word... " >&6; }
-if ${ac_cv_prog_OTOOL64+:} false; then :
-  $as_echo_n "(cached) " >&6
-else
-  if test -n "$OTOOL64"; then
-  ac_cv_prog_OTOOL64="$OTOOL64" # Let the user override the test.
-else
-as_save_IFS=$IFS; IFS=$PATH_SEPARATOR
-for as_dir in $PATH
-do
-  IFS=$as_save_IFS
-  test -z "$as_dir" && as_dir=.
-    for ac_exec_ext in '' $ac_executable_extensions; do
-  if { test -f "$as_dir/$ac_word$ac_exec_ext" && $as_test_x "$as_dir/$ac_word$ac_exec_ext"; }; then
-    ac_cv_prog_OTOOL64="${ac_tool_prefix}otool64"
-    $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5
-    break 2
-  fi
-done
-  done
-IFS=$as_save_IFS
-
-fi
-fi
-OTOOL64=$ac_cv_prog_OTOOL64
-if test -n "$OTOOL64"; then
-  { $as_echo "$as_me:${as_lineno-$LINENO}: result: $OTOOL64" >&5
-$as_echo "$OTOOL64" >&6; }
-else
-  { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5
-$as_echo "no" >&6; }
-fi
-
-
-fi
-if test -z "$ac_cv_prog_OTOOL64"; then
-  ac_ct_OTOOL64=$OTOOL64
-  # Extract the first word of "otool64", so it can be a program name with args.
-set dummy otool64; ac_word=$2
-{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5
-$as_echo_n "checking for $ac_word... " >&6; }
-if ${ac_cv_prog_ac_ct_OTOOL64+:} false; then :
-  $as_echo_n "(cached) " >&6
-else
-  if test -n "$ac_ct_OTOOL64"; then
-  ac_cv_prog_ac_ct_OTOOL64="$ac_ct_OTOOL64" # Let the user override the test.
-else
-as_save_IFS=$IFS; IFS=$PATH_SEPARATOR
-for as_dir in $PATH
-do
-  IFS=$as_save_IFS
-  test -z "$as_dir" && as_dir=.
-    for ac_exec_ext in '' $ac_executable_extensions; do
-  if { test -f "$as_dir/$ac_word$ac_exec_ext" && $as_test_x "$as_dir/$ac_word$ac_exec_ext"; }; then
-    ac_cv_prog_ac_ct_OTOOL64="otool64"
-    $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5
-    break 2
-  fi
-done
-  done
-IFS=$as_save_IFS
-
-fi
-fi
-ac_ct_OTOOL64=$ac_cv_prog_ac_ct_OTOOL64
-if test -n "$ac_ct_OTOOL64"; then
-  { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_ct_OTOOL64" >&5
-$as_echo "$ac_ct_OTOOL64" >&6; }
-else
-  { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5
-$as_echo "no" >&6; }
-fi
-
-  if test "x$ac_ct_OTOOL64" = x; then
-    OTOOL64=":"
-  else
-    case $cross_compiling:$ac_tool_warned in
-yes:)
-{ $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: using cross tools not prefixed with host triplet" >&5
-$as_echo "$as_me: WARNING: using cross tools not prefixed with host triplet" >&2;}
-ac_tool_warned=yes ;;
-esac
-    OTOOL64=$ac_ct_OTOOL64
-  fi
-else
-  OTOOL64="$ac_cv_prog_OTOOL64"
-fi
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-    { $as_echo "$as_me:${as_lineno-$LINENO}: checking for -single_module linker flag" >&5
-$as_echo_n "checking for -single_module linker flag... " >&6; }
-if ${lt_cv_apple_cc_single_mod+:} false; then :
-  $as_echo_n "(cached) " >&6
-else
-  lt_cv_apple_cc_single_mod=no
-      if test -z "${LT_MULTI_MODULE}"; then
-	# By default we will add the -single_module flag. You can override
-	# by either setting the environment variable LT_MULTI_MODULE
-	# non-empty at configure time, or by adding -multi_module to the
-	# link flags.
-	rm -rf libconftest.dylib*
-	echo "int foo(void){return 1;}" > conftest.c
-	echo "$LTCC $LTCFLAGS $LDFLAGS -o libconftest.dylib \
--dynamiclib -Wl,-single_module conftest.c" >&5
-	$LTCC $LTCFLAGS $LDFLAGS -o libconftest.dylib \
-	  -dynamiclib -Wl,-single_module conftest.c 2>conftest.err
-        _lt_result=$?
-	# If there is a non-empty error log, and "single_module"
-	# appears in it, assume the flag caused a linker warning
-        if test -s conftest.err && $GREP single_module conftest.err; then
-	  cat conftest.err >&5
-	# Otherwise, if the output was created with a 0 exit code from
-	# the compiler, it worked.
-	elif test -f libconftest.dylib && test $_lt_result -eq 0; then
-	  lt_cv_apple_cc_single_mod=yes
-	else
-	  cat conftest.err >&5
-	fi
-	rm -rf libconftest.dylib*
-	rm -f conftest.*
-      fi
-fi
-{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $lt_cv_apple_cc_single_mod" >&5
-$as_echo "$lt_cv_apple_cc_single_mod" >&6; }
-
-    { $as_echo "$as_me:${as_lineno-$LINENO}: checking for -exported_symbols_list linker flag" >&5
-$as_echo_n "checking for -exported_symbols_list linker flag... " >&6; }
-if ${lt_cv_ld_exported_symbols_list+:} false; then :
-  $as_echo_n "(cached) " >&6
-else
-  lt_cv_ld_exported_symbols_list=no
-      save_LDFLAGS=$LDFLAGS
-      echo "_main" > conftest.sym
-      LDFLAGS="$LDFLAGS -Wl,-exported_symbols_list,conftest.sym"
-      cat confdefs.h - <<_ACEOF >conftest.$ac_ext
-/* end confdefs.h.  */
-
-int
-main ()
-{
-
-  ;
-  return 0;
-}
-_ACEOF
-if ac_fn_c_try_link "$LINENO"; then :
-  lt_cv_ld_exported_symbols_list=yes
-else
-  lt_cv_ld_exported_symbols_list=no
-fi
-rm -f core conftest.err conftest.$ac_objext \
-    conftest$ac_exeext conftest.$ac_ext
-	LDFLAGS="$save_LDFLAGS"
-
-fi
-{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $lt_cv_ld_exported_symbols_list" >&5
-$as_echo "$lt_cv_ld_exported_symbols_list" >&6; }
-
-    { $as_echo "$as_me:${as_lineno-$LINENO}: checking for -force_load linker flag" >&5
-$as_echo_n "checking for -force_load linker flag... " >&6; }
-if ${lt_cv_ld_force_load+:} false; then :
-  $as_echo_n "(cached) " >&6
-else
-  lt_cv_ld_force_load=no
-      cat > conftest.c << _LT_EOF
-int forced_loaded() { return 2;}
-_LT_EOF
-      echo "$LTCC $LTCFLAGS -c -o conftest.o conftest.c" >&5
-      $LTCC $LTCFLAGS -c -o conftest.o conftest.c 2>&5
-      echo "$AR cru libconftest.a conftest.o" >&5
-      $AR cru libconftest.a conftest.o 2>&5
-      echo "$RANLIB libconftest.a" >&5
-      $RANLIB libconftest.a 2>&5
-      cat > conftest.c << _LT_EOF
-int main() { return 0;}
-_LT_EOF
-      echo "$LTCC $LTCFLAGS $LDFLAGS -o conftest conftest.c -Wl,-force_load,./libconftest.a" >&5
-      $LTCC $LTCFLAGS $LDFLAGS -o conftest conftest.c -Wl,-force_load,./libconftest.a 2>conftest.err
-      _lt_result=$?
-      if test -s conftest.err && $GREP force_load conftest.err; then
-	cat conftest.err >&5
-      elif test -f conftest && test $_lt_result -eq 0 && $GREP forced_load conftest >/dev/null 2>&1 ; then
-	lt_cv_ld_force_load=yes
-      else
-	cat conftest.err >&5
-      fi
-        rm -f conftest.err libconftest.a conftest conftest.c
-        rm -rf conftest.dSYM
-
-fi
-{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $lt_cv_ld_force_load" >&5
-$as_echo "$lt_cv_ld_force_load" >&6; }
-    case $host_os in
-    rhapsody* | darwin1.[012])
-      _lt_dar_allow_undefined='${wl}-undefined ${wl}suppress' ;;
-    darwin1.*)
-      _lt_dar_allow_undefined='${wl}-flat_namespace ${wl}-undefined ${wl}suppress' ;;
-    darwin*) # darwin 5.x on
-      # if running on 10.5 or later, the deployment target defaults
-      # to the OS version, if on x86, and 10.4, the deployment
-      # target defaults to 10.4. Don't you love it?
-      case ${MACOSX_DEPLOYMENT_TARGET-10.0},$host in
-	10.0,*86*-darwin8*|10.0,*-darwin[91]*)
-	  _lt_dar_allow_undefined='${wl}-undefined ${wl}dynamic_lookup' ;;
-	10.[012]*)
-	  _lt_dar_allow_undefined='${wl}-flat_namespace ${wl}-undefined ${wl}suppress' ;;
-	10.*)
-	  _lt_dar_allow_undefined='${wl}-undefined ${wl}dynamic_lookup' ;;
-      esac
-    ;;
-  esac
-    if test "$lt_cv_apple_cc_single_mod" = "yes"; then
-      _lt_dar_single_mod='$single_module'
-    fi
-    if test "$lt_cv_ld_exported_symbols_list" = "yes"; then
-      _lt_dar_export_syms=' ${wl}-exported_symbols_list,$output_objdir/${libname}-symbols.expsym'
-    else
-      _lt_dar_export_syms='~$NMEDIT -s $output_objdir/${libname}-symbols.expsym ${lib}'
-    fi
-    if test "$DSYMUTIL" != ":" && test "$lt_cv_ld_force_load" = "no"; then
-      _lt_dsymutil='~$DSYMUTIL $lib || :'
-    else
-      _lt_dsymutil=
-    fi
-    ;;
-  esac
-
-for ac_header in dlfcn.h
-do :
-  ac_fn_c_check_header_compile "$LINENO" "dlfcn.h" "ac_cv_header_dlfcn_h" "$ac_includes_default
-"
-if test "x$ac_cv_header_dlfcn_h" = xyes; then :
-  cat >>confdefs.h <<_ACEOF
-#define HAVE_DLFCN_H 1
-_ACEOF
-
-fi
-
-done
-
-
-
-func_stripname_cnf ()
-{
-  case ${2} in
-  .*) func_stripname_result=`$ECHO "${3}" | $SED "s%^${1}%%; s%\\\\${2}\$%%"`;;
-  *)  func_stripname_result=`$ECHO "${3}" | $SED "s%^${1}%%; s%${2}\$%%"`;;
-  esac
-} # func_stripname_cnf
-
-
-
-
-
-
-# Set options
-
-
-
-        enable_dlopen=no
-
-
-  enable_win32_dll=no
-
-
-            # Check whether --enable-shared was given.
-if test "${enable_shared+set}" = set; then :
-  enableval=$enable_shared; p=${PACKAGE-default}
-    case $enableval in
-    yes) enable_shared=yes ;;
-    no) enable_shared=no ;;
-    *)
-      enable_shared=no
-      # Look at the argument we got.  We use all the common list separators.
-      lt_save_ifs="$IFS"; IFS="${IFS}$PATH_SEPARATOR,"
-      for pkg in $enableval; do
-	IFS="$lt_save_ifs"
-	if test "X$pkg" = "X$p"; then
-	  enable_shared=yes
-	fi
-      done
-      IFS="$lt_save_ifs"
-      ;;
-    esac
-else
-  enable_shared=yes
-fi
-
-
-
-
-
-
-
-
-
-  # Check whether --enable-static was given.
-if test "${enable_static+set}" = set; then :
-  enableval=$enable_static; p=${PACKAGE-default}
-    case $enableval in
-    yes) enable_static=yes ;;
-    no) enable_static=no ;;
-    *)
-     enable_static=no
-      # Look at the argument we got.  We use all the common list separators.
-      lt_save_ifs="$IFS"; IFS="${IFS}$PATH_SEPARATOR,"
-      for pkg in $enableval; do
-	IFS="$lt_save_ifs"
-	if test "X$pkg" = "X$p"; then
-	  enable_static=yes
-	fi
-      done
-      IFS="$lt_save_ifs"
-      ;;
-    esac
-else
-  enable_static=yes
-fi
-
-
-
-
-
-
-
-
-
-
-# Check whether --with-pic was given.
-if test "${with_pic+set}" = set; then :
-  withval=$with_pic; lt_p=${PACKAGE-default}
-    case $withval in
-    yes|no) pic_mode=$withval ;;
-    *)
-      pic_mode=default
-      # Look at the argument we got.  We use all the common list separators.
-      lt_save_ifs="$IFS"; IFS="${IFS}$PATH_SEPARATOR,"
-      for lt_pkg in $withval; do
-	IFS="$lt_save_ifs"
-	if test "X$lt_pkg" = "X$lt_p"; then
-	  pic_mode=yes
-	fi
-      done
-      IFS="$lt_save_ifs"
-      ;;
-    esac
-else
-  pic_mode=default
-fi
-
-
-test -z "$pic_mode" && pic_mode=default
-
-
-
-
-
-
-
-  # Check whether --enable-fast-install was given.
-if test "${enable_fast_install+set}" = set; then :
-  enableval=$enable_fast_install; p=${PACKAGE-default}
-    case $enableval in
-    yes) enable_fast_install=yes ;;
-    no) enable_fast_install=no ;;
-    *)
-      enable_fast_install=no
-      # Look at the argument we got.  We use all the common list separators.
-      lt_save_ifs="$IFS"; IFS="${IFS}$PATH_SEPARATOR,"
-      for pkg in $enableval; do
-	IFS="$lt_save_ifs"
-	if test "X$pkg" = "X$p"; then
-	  enable_fast_install=yes
-	fi
-      done
-      IFS="$lt_save_ifs"
-      ;;
-    esac
-else
-  enable_fast_install=yes
-fi
-
-
-
-
-
-
-
-
-
-
-
-# This can be used to rebuild libtool when needed
-LIBTOOL_DEPS="$ltmain"
-
-# Always use our own libtool.
-LIBTOOL='$(SHELL) $(top_builddir)/libtool'
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-test -z "$LN_S" && LN_S="ln -s"
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-if test -n "${ZSH_VERSION+set}" ; then
-   setopt NO_GLOB_SUBST
-fi
-
-{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for objdir" >&5
-$as_echo_n "checking for objdir... " >&6; }
-if ${lt_cv_objdir+:} false; then :
-  $as_echo_n "(cached) " >&6
-else
-  rm -f .libs 2>/dev/null
-mkdir .libs 2>/dev/null
-if test -d .libs; then
-  lt_cv_objdir=.libs
-else
-  # MS-DOS does not allow filenames that begin with a dot.
-  lt_cv_objdir=_libs
-fi
-rmdir .libs 2>/dev/null
-fi
-{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $lt_cv_objdir" >&5
-$as_echo "$lt_cv_objdir" >&6; }
-objdir=$lt_cv_objdir
-
-
-
-
-
-cat >>confdefs.h <<_ACEOF
-#define LT_OBJDIR "$lt_cv_objdir/"
-_ACEOF
-
-
-
-
-case $host_os in
-aix3*)
-  # AIX sometimes has problems with the GCC collect2 program.  For some
-  # reason, if we set the COLLECT_NAMES environment variable, the problems
-  # vanish in a puff of smoke.
-  if test "X${COLLECT_NAMES+set}" != Xset; then
-    COLLECT_NAMES=
-    export COLLECT_NAMES
-  fi
-  ;;
-esac
-
-# Global variables:
-ofile=libtool
-can_build_shared=yes
-
-# All known linkers require a `.a' archive for static linking (except MSVC,
-# which needs '.lib').
-libext=a
-
-with_gnu_ld="$lt_cv_prog_gnu_ld"
-
-old_CC="$CC"
-old_CFLAGS="$CFLAGS"
-
-# Set sane defaults for various variables
-test -z "$CC" && CC=cc
-test -z "$LTCC" && LTCC=$CC
-test -z "$LTCFLAGS" && LTCFLAGS=$CFLAGS
-test -z "$LD" && LD=ld
-test -z "$ac_objext" && ac_objext=o
-
-for cc_temp in $compiler""; do
-  case $cc_temp in
-    compile | *[\\/]compile | ccache | *[\\/]ccache ) ;;
-    distcc | *[\\/]distcc | purify | *[\\/]purify ) ;;
-    \-*) ;;
-    *) break;;
-  esac
-done
-cc_basename=`$ECHO "$cc_temp" | $SED "s%.*/%%; s%^$host_alias-%%"`
-
-
-# Only perform the check for file, if the check method requires it
-test -z "$MAGIC_CMD" && MAGIC_CMD=file
-case $deplibs_check_method in
-file_magic*)
-  if test "$file_magic_cmd" = '$MAGIC_CMD'; then
-    { $as_echo "$as_me:${as_lineno-$LINENO}: checking for ${ac_tool_prefix}file" >&5
-$as_echo_n "checking for ${ac_tool_prefix}file... " >&6; }
-if ${lt_cv_path_MAGIC_CMD+:} false; then :
-  $as_echo_n "(cached) " >&6
-else
-  case $MAGIC_CMD in
-[\\/*] |  ?:[\\/]*)
-  lt_cv_path_MAGIC_CMD="$MAGIC_CMD" # Let the user override the test with a path.
-  ;;
-*)
-  lt_save_MAGIC_CMD="$MAGIC_CMD"
-  lt_save_ifs="$IFS"; IFS=$PATH_SEPARATOR
-  ac_dummy="/usr/bin$PATH_SEPARATOR$PATH"
-  for ac_dir in $ac_dummy; do
-    IFS="$lt_save_ifs"
-    test -z "$ac_dir" && ac_dir=.
-    if test -f $ac_dir/${ac_tool_prefix}file; then
-      lt_cv_path_MAGIC_CMD="$ac_dir/${ac_tool_prefix}file"
-      if test -n "$file_magic_test_file"; then
-	case $deplibs_check_method in
-	"file_magic "*)
-	  file_magic_regex=`expr "$deplibs_check_method" : "file_magic \(.*\)"`
-	  MAGIC_CMD="$lt_cv_path_MAGIC_CMD"
-	  if eval $file_magic_cmd \$file_magic_test_file 2> /dev/null |
-	    $EGREP "$file_magic_regex" > /dev/null; then
-	    :
-	  else
-	    cat <<_LT_EOF 1>&2
-
-*** Warning: the command libtool uses to detect shared libraries,
-*** $file_magic_cmd, produces output that libtool cannot recognize.
-*** The result is that libtool may fail to recognize shared libraries
-*** as such.  This will affect the creation of libtool libraries that
-*** depend on shared libraries, but programs linked with such libtool
-*** libraries will work regardless of this problem.  Nevertheless, you
-*** may want to report the problem to your system manager and/or to
-*** bug-libtool@gnu.org
-
-_LT_EOF
-	  fi ;;
-	esac
-      fi
-      break
-    fi
-  done
-  IFS="$lt_save_ifs"
-  MAGIC_CMD="$lt_save_MAGIC_CMD"
-  ;;
-esac
-fi
-
-MAGIC_CMD="$lt_cv_path_MAGIC_CMD"
-if test -n "$MAGIC_CMD"; then
-  { $as_echo "$as_me:${as_lineno-$LINENO}: result: $MAGIC_CMD" >&5
-$as_echo "$MAGIC_CMD" >&6; }
-else
-  { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5
-$as_echo "no" >&6; }
-fi
-
-
-
-
-
-if test -z "$lt_cv_path_MAGIC_CMD"; then
-  if test -n "$ac_tool_prefix"; then
-    { $as_echo "$as_me:${as_lineno-$LINENO}: checking for file" >&5
-$as_echo_n "checking for file... " >&6; }
-if ${lt_cv_path_MAGIC_CMD+:} false; then :
-  $as_echo_n "(cached) " >&6
-else
-  case $MAGIC_CMD in
-[\\/*] |  ?:[\\/]*)
-  lt_cv_path_MAGIC_CMD="$MAGIC_CMD" # Let the user override the test with a path.
-  ;;
-*)
-  lt_save_MAGIC_CMD="$MAGIC_CMD"
-  lt_save_ifs="$IFS"; IFS=$PATH_SEPARATOR
-  ac_dummy="/usr/bin$PATH_SEPARATOR$PATH"
-  for ac_dir in $ac_dummy; do
-    IFS="$lt_save_ifs"
-    test -z "$ac_dir" && ac_dir=.
-    if test -f $ac_dir/file; then
-      lt_cv_path_MAGIC_CMD="$ac_dir/file"
-      if test -n "$file_magic_test_file"; then
-	case $deplibs_check_method in
-	"file_magic "*)
-	  file_magic_regex=`expr "$deplibs_check_method" : "file_magic \(.*\)"`
-	  MAGIC_CMD="$lt_cv_path_MAGIC_CMD"
-	  if eval $file_magic_cmd \$file_magic_test_file 2> /dev/null |
-	    $EGREP "$file_magic_regex" > /dev/null; then
-	    :
-	  else
-	    cat <<_LT_EOF 1>&2
-
-*** Warning: the command libtool uses to detect shared libraries,
-*** $file_magic_cmd, produces output that libtool cannot recognize.
-*** The result is that libtool may fail to recognize shared libraries
-*** as such.  This will affect the creation of libtool libraries that
-*** depend on shared libraries, but programs linked with such libtool
-*** libraries will work regardless of this problem.  Nevertheless, you
-*** may want to report the problem to your system manager and/or to
-*** bug-libtool@gnu.org
-
-_LT_EOF
-	  fi ;;
-	esac
-      fi
-      break
-    fi
-  done
-  IFS="$lt_save_ifs"
-  MAGIC_CMD="$lt_save_MAGIC_CMD"
-  ;;
-esac
-fi
-
-MAGIC_CMD="$lt_cv_path_MAGIC_CMD"
-if test -n "$MAGIC_CMD"; then
-  { $as_echo "$as_me:${as_lineno-$LINENO}: result: $MAGIC_CMD" >&5
-$as_echo "$MAGIC_CMD" >&6; }
-else
-  { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5
-$as_echo "no" >&6; }
-fi
-
-
-  else
-    MAGIC_CMD=:
-  fi
-fi
-
-  fi
-  ;;
-esac
-
-# Use C for the default configuration in the libtool script
-
-lt_save_CC="$CC"
-ac_ext=c
-ac_cpp='$CPP $CPPFLAGS'
-ac_compile='$CC -c $CFLAGS $CPPFLAGS conftest.$ac_ext >&5'
-ac_link='$CC -o conftest$ac_exeext $CFLAGS $CPPFLAGS $LDFLAGS conftest.$ac_ext $LIBS >&5'
-ac_compiler_gnu=$ac_cv_c_compiler_gnu
-
-
-# Source file extension for C test sources.
-ac_ext=c
-
-# Object file extension for compiled C test sources.
-objext=o
-objext=$objext
-
-# Code to be used in simple compile tests
-lt_simple_compile_test_code="int some_variable = 0;"
-
-# Code to be used in simple link tests
-lt_simple_link_test_code='int main(){return(0);}'
-
-
-
-
-
-
-
-# If no C compiler was specified, use CC.
-LTCC=${LTCC-"$CC"}
-
-# If no C compiler flags were specified, use CFLAGS.
-LTCFLAGS=${LTCFLAGS-"$CFLAGS"}
-
-# Allow CC to be a program name with arguments.
-compiler=$CC
-
-# Save the default compiler, since it gets overwritten when the other
-# tags are being tested, and _LT_TAGVAR(compiler, []) is a NOP.
-compiler_DEFAULT=$CC
-
-# save warnings/boilerplate of simple test code
-ac_outfile=conftest.$ac_objext
-echo "$lt_simple_compile_test_code" >conftest.$ac_ext
-eval "$ac_compile" 2>&1 >/dev/null | $SED '/^$/d; /^ *+/d' >conftest.err
-_lt_compiler_boilerplate=`cat conftest.err`
-$RM conftest*
-
-ac_outfile=conftest.$ac_objext
-echo "$lt_simple_link_test_code" >conftest.$ac_ext
-eval "$ac_link" 2>&1 >/dev/null | $SED '/^$/d; /^ *+/d' >conftest.err
-_lt_linker_boilerplate=`cat conftest.err`
-$RM -r conftest*
-
-
-## CAVEAT EMPTOR:
-## There is no encapsulation within the following macros, do not change
-## the running order or otherwise move them around unless you know exactly
-## what you are doing...
-if test -n "$compiler"; then
-
-lt_prog_compiler_no_builtin_flag=
-
-if test "$GCC" = yes; then
-  case $cc_basename in
-  nvcc*)
-    lt_prog_compiler_no_builtin_flag=' -Xcompiler -fno-builtin' ;;
-  *)
-    lt_prog_compiler_no_builtin_flag=' -fno-builtin' ;;
-  esac
-
-  { $as_echo "$as_me:${as_lineno-$LINENO}: checking if $compiler supports -fno-rtti -fno-exceptions" >&5
-$as_echo_n "checking if $compiler supports -fno-rtti -fno-exceptions... " >&6; }
-if ${lt_cv_prog_compiler_rtti_exceptions+:} false; then :
-  $as_echo_n "(cached) " >&6
-else
-  lt_cv_prog_compiler_rtti_exceptions=no
-   ac_outfile=conftest.$ac_objext
-   echo "$lt_simple_compile_test_code" > conftest.$ac_ext
-   lt_compiler_flag="-fno-rtti -fno-exceptions"
-   # Insert the option either (1) after the last *FLAGS variable, or
-   # (2) before a word containing "conftest.", or (3) at the end.
-   # Note that $ac_compile itself does not contain backslashes and begins
-   # with a dollar sign (not a hyphen), so the echo should work correctly.
-   # The option is referenced via a variable to avoid confusing sed.
-   lt_compile=`echo "$ac_compile" | $SED \
-   -e 's:.*FLAGS}\{0,1\} :&$lt_compiler_flag :; t' \
-   -e 's: [^ ]*conftest\.: $lt_compiler_flag&:; t' \
-   -e 's:$: $lt_compiler_flag:'`
-   (eval echo "\"\$as_me:$LINENO: $lt_compile\"" >&5)
-   (eval "$lt_compile" 2>conftest.err)
-   ac_status=$?
-   cat conftest.err >&5
-   echo "$as_me:$LINENO: \$? = $ac_status" >&5
-   if (exit $ac_status) && test -s "$ac_outfile"; then
-     # The compiler can only warn and ignore the option if not recognized
-     # So say no if there are warnings other than the usual output.
-     $ECHO "$_lt_compiler_boilerplate" | $SED '/^$/d' >conftest.exp
-     $SED '/^$/d; /^ *+/d' conftest.err >conftest.er2
-     if test ! -s conftest.er2 || diff conftest.exp conftest.er2 >/dev/null; then
-       lt_cv_prog_compiler_rtti_exceptions=yes
-     fi
-   fi
-   $RM conftest*
-
-fi
-{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $lt_cv_prog_compiler_rtti_exceptions" >&5
-$as_echo "$lt_cv_prog_compiler_rtti_exceptions" >&6; }
-
-if test x"$lt_cv_prog_compiler_rtti_exceptions" = xyes; then
-    lt_prog_compiler_no_builtin_flag="$lt_prog_compiler_no_builtin_flag -fno-rtti -fno-exceptions"
-else
-    :
-fi
-
-fi
-
-
-
-
-
-
-  lt_prog_compiler_wl=
-lt_prog_compiler_pic=
-lt_prog_compiler_static=
-
-
-  if test "$GCC" = yes; then
-    lt_prog_compiler_wl='-Wl,'
-    lt_prog_compiler_static='-static'
-
-    case $host_os in
-      aix*)
-      # All AIX code is PIC.
-      if test "$host_cpu" = ia64; then
-	# AIX 5 now supports IA64 processor
-	lt_prog_compiler_static='-Bstatic'
-      fi
-      ;;
-
-    amigaos*)
-      case $host_cpu in
-      powerpc)
-            # see comment about AmigaOS4 .so support
-            lt_prog_compiler_pic='-fPIC'
-        ;;
-      m68k)
-            # FIXME: we need at least 68020 code to build shared libraries, but
-            # adding the `-m68020' flag to GCC prevents building anything better,
-            # like `-m68040'.
-            lt_prog_compiler_pic='-m68020 -resident32 -malways-restore-a4'
-        ;;
-      esac
-      ;;
-
-    beos* | irix5* | irix6* | nonstopux* | osf3* | osf4* | osf5*)
-      # PIC is the default for these OSes.
-      ;;
-
-    mingw* | cygwin* | pw32* | os2* | cegcc*)
-      # This hack is so that the source file can tell whether it is being
-      # built for inclusion in a dll (and should export symbols for example).
-      # Although the cygwin gcc ignores -fPIC, still need this for old-style
-      # (--disable-auto-import) libraries
-      lt_prog_compiler_pic='-DDLL_EXPORT'
-      ;;
-
-    darwin* | rhapsody*)
-      # PIC is the default on this platform
-      # Common symbols not allowed in MH_DYLIB files
-      lt_prog_compiler_pic='-fno-common'
-      ;;
-
-    haiku*)
-      # PIC is the default for Haiku.
-      # The "-static" flag exists, but is broken.
-      lt_prog_compiler_static=
-      ;;
-
-    hpux*)
-      # PIC is the default for 64-bit PA HP-UX, but not for 32-bit
-      # PA HP-UX.  On IA64 HP-UX, PIC is the default but the pic flag
-      # sets the default TLS model and affects inlining.
-      case $host_cpu in
-      hppa*64*)
-	# +Z the default
-	;;
-      *)
-	lt_prog_compiler_pic='-fPIC'
-	;;
-      esac
-      ;;
-
-    interix[3-9]*)
-      # Interix 3.x gcc -fpic/-fPIC options generate broken code.
-      # Instead, we relocate shared libraries at runtime.
-      ;;
-
-    msdosdjgpp*)
-      # Just because we use GCC doesn't mean we suddenly get shared libraries
-      # on systems that don't support them.
-      lt_prog_compiler_can_build_shared=no
-      enable_shared=no
-      ;;
-
-    *nto* | *qnx*)
-      # QNX uses GNU C++, but need to define -shared option too, otherwise
-      # it will coredump.
-      lt_prog_compiler_pic='-fPIC -shared'
-      ;;
-
-    sysv4*MP*)
-      if test -d /usr/nec; then
-	lt_prog_compiler_pic=-Kconform_pic
-      fi
-      ;;
-
-    *)
-      lt_prog_compiler_pic='-fPIC'
-      ;;
-    esac
-
-    case $cc_basename in
-    nvcc*) # Cuda Compiler Driver 2.2
-      lt_prog_compiler_wl='-Xlinker '
-      if test -n "$lt_prog_compiler_pic"; then
-        lt_prog_compiler_pic="-Xcompiler $lt_prog_compiler_pic"
-      fi
-      ;;
-    esac
-  else
-    # PORTME Check for flag to pass linker flags through the system compiler.
-    case $host_os in
-    aix*)
-      lt_prog_compiler_wl='-Wl,'
-      if test "$host_cpu" = ia64; then
-	# AIX 5 now supports IA64 processor
-	lt_prog_compiler_static='-Bstatic'
-      else
-	lt_prog_compiler_static='-bnso -bI:/lib/syscalls.exp'
-      fi
-      ;;
-
-    mingw* | cygwin* | pw32* | os2* | cegcc*)
-      # This hack is so that the source file can tell whether it is being
-      # built for inclusion in a dll (and should export symbols for example).
-      lt_prog_compiler_pic='-DDLL_EXPORT'
-      ;;
-
-    hpux9* | hpux10* | hpux11*)
-      lt_prog_compiler_wl='-Wl,'
-      # PIC is the default for IA64 HP-UX and 64-bit HP-UX, but
-      # not for PA HP-UX.
-      case $host_cpu in
-      hppa*64*|ia64*)
-	# +Z the default
-	;;
-      *)
-	lt_prog_compiler_pic='+Z'
-	;;
-      esac
-      # Is there a better lt_prog_compiler_static that works with the bundled CC?
-      lt_prog_compiler_static='${wl}-a ${wl}archive'
-      ;;
-
-    irix5* | irix6* | nonstopux*)
-      lt_prog_compiler_wl='-Wl,'
-      # PIC (with -KPIC) is the default.
-      lt_prog_compiler_static='-non_shared'
-      ;;
-
-    linux* | k*bsd*-gnu | kopensolaris*-gnu)
-      case $cc_basename in
-      # old Intel for x86_64 which still supported -KPIC.
-      ecc*)
-	lt_prog_compiler_wl='-Wl,'
-	lt_prog_compiler_pic='-KPIC'
-	lt_prog_compiler_static='-static'
-        ;;
-      # icc used to be incompatible with GCC.
-      # ICC 10 doesn't accept -KPIC any more.
-      icc* | ifort*)
-	lt_prog_compiler_wl='-Wl,'
-	lt_prog_compiler_pic='-fPIC'
-	lt_prog_compiler_static='-static'
-        ;;
-      # Lahey Fortran 8.1.
-      lf95*)
-	lt_prog_compiler_wl='-Wl,'
-	lt_prog_compiler_pic='--shared'
-	lt_prog_compiler_static='--static'
-	;;
-      nagfor*)
-	# NAG Fortran compiler
-	lt_prog_compiler_wl='-Wl,-Wl,,'
-	lt_prog_compiler_pic='-PIC'
-	lt_prog_compiler_static='-Bstatic'
-	;;
-      pgcc* | pgf77* | pgf90* | pgf95* | pgfortran*)
-        # Portland Group compilers (*not* the Pentium gcc compiler,
-	# which looks to be a dead project)
-	lt_prog_compiler_wl='-Wl,'
-	lt_prog_compiler_pic='-fpic'
-	lt_prog_compiler_static='-Bstatic'
-        ;;
-      ccc*)
-        lt_prog_compiler_wl='-Wl,'
-        # All Alpha code is PIC.
-        lt_prog_compiler_static='-non_shared'
-        ;;
-      xl* | bgxl* | bgf* | mpixl*)
-	# IBM XL C 8.0/Fortran 10.1, 11.1 on PPC and BlueGene
-	lt_prog_compiler_wl='-Wl,'
-	lt_prog_compiler_pic='-qpic'
-	lt_prog_compiler_static='-qstaticlink'
-	;;
-      *)
-	case `$CC -V 2>&1 | sed 5q` in
-	*Sun\ Ceres\ Fortran* | *Sun*Fortran*\ [1-7].* | *Sun*Fortran*\ 8.[0-3]*)
-	  # Sun Fortran 8.3 passes all unrecognized flags to the linker
-	  lt_prog_compiler_pic='-KPIC'
-	  lt_prog_compiler_static='-Bstatic'
-	  lt_prog_compiler_wl=''
-	  ;;
-	*Sun\ F* | *Sun*Fortran*)
-	  lt_prog_compiler_pic='-KPIC'
-	  lt_prog_compiler_static='-Bstatic'
-	  lt_prog_compiler_wl='-Qoption ld '
-	  ;;
-	*Sun\ C*)
-	  # Sun C 5.9
-	  lt_prog_compiler_pic='-KPIC'
-	  lt_prog_compiler_static='-Bstatic'
-	  lt_prog_compiler_wl='-Wl,'
-	  ;;
-        *Intel*\ [CF]*Compiler*)
-	  lt_prog_compiler_wl='-Wl,'
-	  lt_prog_compiler_pic='-fPIC'
-	  lt_prog_compiler_static='-static'
-	  ;;
-	*Portland\ Group*)
-	  lt_prog_compiler_wl='-Wl,'
-	  lt_prog_compiler_pic='-fpic'
-	  lt_prog_compiler_static='-Bstatic'
-	  ;;
-	esac
-	;;
-      esac
-      ;;
-
-    newsos6)
-      lt_prog_compiler_pic='-KPIC'
-      lt_prog_compiler_static='-Bstatic'
-      ;;
-
-    *nto* | *qnx*)
-      # QNX uses GNU C++, but need to define -shared option too, otherwise
-      # it will coredump.
-      lt_prog_compiler_pic='-fPIC -shared'
-      ;;
-
-    osf3* | osf4* | osf5*)
-      lt_prog_compiler_wl='-Wl,'
-      # All OSF/1 code is PIC.
-      lt_prog_compiler_static='-non_shared'
-      ;;
-
-    rdos*)
-      lt_prog_compiler_static='-non_shared'
-      ;;
-
-    solaris*)
-      lt_prog_compiler_pic='-KPIC'
-      lt_prog_compiler_static='-Bstatic'
-      case $cc_basename in
-      f77* | f90* | f95* | sunf77* | sunf90* | sunf95*)
-	lt_prog_compiler_wl='-Qoption ld ';;
-      *)
-	lt_prog_compiler_wl='-Wl,';;
-      esac
-      ;;
-
-    sunos4*)
-      lt_prog_compiler_wl='-Qoption ld '
-      lt_prog_compiler_pic='-PIC'
-      lt_prog_compiler_static='-Bstatic'
-      ;;
-
-    sysv4 | sysv4.2uw2* | sysv4.3*)
-      lt_prog_compiler_wl='-Wl,'
-      lt_prog_compiler_pic='-KPIC'
-      lt_prog_compiler_static='-Bstatic'
-      ;;
-
-    sysv4*MP*)
-      if test -d /usr/nec ;then
-	lt_prog_compiler_pic='-Kconform_pic'
-	lt_prog_compiler_static='-Bstatic'
-      fi
-      ;;
-
-    sysv5* | unixware* | sco3.2v5* | sco5v6* | OpenUNIX*)
-      lt_prog_compiler_wl='-Wl,'
-      lt_prog_compiler_pic='-KPIC'
-      lt_prog_compiler_static='-Bstatic'
-      ;;
-
-    unicos*)
-      lt_prog_compiler_wl='-Wl,'
-      lt_prog_compiler_can_build_shared=no
-      ;;
-
-    uts4*)
-      lt_prog_compiler_pic='-pic'
-      lt_prog_compiler_static='-Bstatic'
-      ;;
-
-    *)
-      lt_prog_compiler_can_build_shared=no
-      ;;
-    esac
-  fi
-
-case $host_os in
-  # For platforms which do not support PIC, -DPIC is meaningless:
-  *djgpp*)
-    lt_prog_compiler_pic=
-    ;;
-  *)
-    lt_prog_compiler_pic="$lt_prog_compiler_pic -DPIC"
-    ;;
-esac
-
-{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $compiler option to produce PIC" >&5
-$as_echo_n "checking for $compiler option to produce PIC... " >&6; }
-if ${lt_cv_prog_compiler_pic+:} false; then :
-  $as_echo_n "(cached) " >&6
-else
-  lt_cv_prog_compiler_pic=$lt_prog_compiler_pic
-fi
-{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $lt_cv_prog_compiler_pic" >&5
-$as_echo "$lt_cv_prog_compiler_pic" >&6; }
-lt_prog_compiler_pic=$lt_cv_prog_compiler_pic
-
-#
-# Check to make sure the PIC flag actually works.
-#
-if test -n "$lt_prog_compiler_pic"; then
-  { $as_echo "$as_me:${as_lineno-$LINENO}: checking if $compiler PIC flag $lt_prog_compiler_pic works" >&5
-$as_echo_n "checking if $compiler PIC flag $lt_prog_compiler_pic works... " >&6; }
-if ${lt_cv_prog_compiler_pic_works+:} false; then :
-  $as_echo_n "(cached) " >&6
-else
-  lt_cv_prog_compiler_pic_works=no
-   ac_outfile=conftest.$ac_objext
-   echo "$lt_simple_compile_test_code" > conftest.$ac_ext
-   lt_compiler_flag="$lt_prog_compiler_pic -DPIC"
-   # Insert the option either (1) after the last *FLAGS variable, or
-   # (2) before a word containing "conftest.", or (3) at the end.
-   # Note that $ac_compile itself does not contain backslashes and begins
-   # with a dollar sign (not a hyphen), so the echo should work correctly.
-   # The option is referenced via a variable to avoid confusing sed.
-   lt_compile=`echo "$ac_compile" | $SED \
-   -e 's:.*FLAGS}\{0,1\} :&$lt_compiler_flag :; t' \
-   -e 's: [^ ]*conftest\.: $lt_compiler_flag&:; t' \
-   -e 's:$: $lt_compiler_flag:'`
-   (eval echo "\"\$as_me:$LINENO: $lt_compile\"" >&5)
-   (eval "$lt_compile" 2>conftest.err)
-   ac_status=$?
-   cat conftest.err >&5
-   echo "$as_me:$LINENO: \$? = $ac_status" >&5
-   if (exit $ac_status) && test -s "$ac_outfile"; then
-     # The compiler can only warn and ignore the option if not recognized
-     # So say no if there are warnings other than the usual output.
-     $ECHO "$_lt_compiler_boilerplate" | $SED '/^$/d' >conftest.exp
-     $SED '/^$/d; /^ *+/d' conftest.err >conftest.er2
-     if test ! -s conftest.er2 || diff conftest.exp conftest.er2 >/dev/null; then
-       lt_cv_prog_compiler_pic_works=yes
-     fi
-   fi
-   $RM conftest*
-
-fi
-{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $lt_cv_prog_compiler_pic_works" >&5
-$as_echo "$lt_cv_prog_compiler_pic_works" >&6; }
-
-if test x"$lt_cv_prog_compiler_pic_works" = xyes; then
-    case $lt_prog_compiler_pic in
-     "" | " "*) ;;
-     *) lt_prog_compiler_pic=" $lt_prog_compiler_pic" ;;
-     esac
-else
-    lt_prog_compiler_pic=
-     lt_prog_compiler_can_build_shared=no
-fi
-
-fi
-
-
-
-
-
-
-
-
-
-
-
-#
-# Check to make sure the static flag actually works.
-#
-wl=$lt_prog_compiler_wl eval lt_tmp_static_flag=\"$lt_prog_compiler_static\"
-{ $as_echo "$as_me:${as_lineno-$LINENO}: checking if $compiler static flag $lt_tmp_static_flag works" >&5
-$as_echo_n "checking if $compiler static flag $lt_tmp_static_flag works... " >&6; }
-if ${lt_cv_prog_compiler_static_works+:} false; then :
-  $as_echo_n "(cached) " >&6
-else
-  lt_cv_prog_compiler_static_works=no
-   save_LDFLAGS="$LDFLAGS"
-   LDFLAGS="$LDFLAGS $lt_tmp_static_flag"
-   echo "$lt_simple_link_test_code" > conftest.$ac_ext
-   if (eval $ac_link 2>conftest.err) && test -s conftest$ac_exeext; then
-     # The linker can only warn and ignore the option if not recognized
-     # So say no if there are warnings
-     if test -s conftest.err; then
-       # Append any errors to the config.log.
-       cat conftest.err 1>&5
-       $ECHO "$_lt_linker_boilerplate" | $SED '/^$/d' > conftest.exp
-       $SED '/^$/d; /^ *+/d' conftest.err >conftest.er2
-       if diff conftest.exp conftest.er2 >/dev/null; then
-         lt_cv_prog_compiler_static_works=yes
-       fi
-     else
-       lt_cv_prog_compiler_static_works=yes
-     fi
-   fi
-   $RM -r conftest*
-   LDFLAGS="$save_LDFLAGS"
-
-fi
-{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $lt_cv_prog_compiler_static_works" >&5
-$as_echo "$lt_cv_prog_compiler_static_works" >&6; }
-
-if test x"$lt_cv_prog_compiler_static_works" = xyes; then
-    :
-else
-    lt_prog_compiler_static=
-fi
-
-
-
-
-
-
-
-  { $as_echo "$as_me:${as_lineno-$LINENO}: checking if $compiler supports -c -o file.$ac_objext" >&5
-$as_echo_n "checking if $compiler supports -c -o file.$ac_objext... " >&6; }
-if ${lt_cv_prog_compiler_c_o+:} false; then :
-  $as_echo_n "(cached) " >&6
-else
-  lt_cv_prog_compiler_c_o=no
-   $RM -r conftest 2>/dev/null
-   mkdir conftest
-   cd conftest
-   mkdir out
-   echo "$lt_simple_compile_test_code" > conftest.$ac_ext
-
-   lt_compiler_flag="-o out/conftest2.$ac_objext"
-   # Insert the option either (1) after the last *FLAGS variable, or
-   # (2) before a word containing "conftest.", or (3) at the end.
-   # Note that $ac_compile itself does not contain backslashes and begins
-   # with a dollar sign (not a hyphen), so the echo should work correctly.
-   lt_compile=`echo "$ac_compile" | $SED \
-   -e 's:.*FLAGS}\{0,1\} :&$lt_compiler_flag :; t' \
-   -e 's: [^ ]*conftest\.: $lt_compiler_flag&:; t' \
-   -e 's:$: $lt_compiler_flag:'`
-   (eval echo "\"\$as_me:$LINENO: $lt_compile\"" >&5)
-   (eval "$lt_compile" 2>out/conftest.err)
-   ac_status=$?
-   cat out/conftest.err >&5
-   echo "$as_me:$LINENO: \$? = $ac_status" >&5
-   if (exit $ac_status) && test -s out/conftest2.$ac_objext
-   then
-     # The compiler can only warn and ignore the option if not recognized
-     # So say no if there are warnings
-     $ECHO "$_lt_compiler_boilerplate" | $SED '/^$/d' > out/conftest.exp
-     $SED '/^$/d; /^ *+/d' out/conftest.err >out/conftest.er2
-     if test ! -s out/conftest.er2 || diff out/conftest.exp out/conftest.er2 >/dev/null; then
-       lt_cv_prog_compiler_c_o=yes
-     fi
-   fi
-   chmod u+w . 2>&5
-   $RM conftest*
-   # SGI C++ compiler will create directory out/ii_files/ for
-   # template instantiation
-   test -d out/ii_files && $RM out/ii_files/* && rmdir out/ii_files
-   $RM out/* && rmdir out
-   cd ..
-   $RM -r conftest
-   $RM conftest*
-
-fi
-{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $lt_cv_prog_compiler_c_o" >&5
-$as_echo "$lt_cv_prog_compiler_c_o" >&6; }
-
-
-
-
-
-
-  { $as_echo "$as_me:${as_lineno-$LINENO}: checking if $compiler supports -c -o file.$ac_objext" >&5
-$as_echo_n "checking if $compiler supports -c -o file.$ac_objext... " >&6; }
-if ${lt_cv_prog_compiler_c_o+:} false; then :
-  $as_echo_n "(cached) " >&6
-else
-  lt_cv_prog_compiler_c_o=no
-   $RM -r conftest 2>/dev/null
-   mkdir conftest
-   cd conftest
-   mkdir out
-   echo "$lt_simple_compile_test_code" > conftest.$ac_ext
-
-   lt_compiler_flag="-o out/conftest2.$ac_objext"
-   # Insert the option either (1) after the last *FLAGS variable, or
-   # (2) before a word containing "conftest.", or (3) at the end.
-   # Note that $ac_compile itself does not contain backslashes and begins
-   # with a dollar sign (not a hyphen), so the echo should work correctly.
-   lt_compile=`echo "$ac_compile" | $SED \
-   -e 's:.*FLAGS}\{0,1\} :&$lt_compiler_flag :; t' \
-   -e 's: [^ ]*conftest\.: $lt_compiler_flag&:; t' \
-   -e 's:$: $lt_compiler_flag:'`
-   (eval echo "\"\$as_me:$LINENO: $lt_compile\"" >&5)
-   (eval "$lt_compile" 2>out/conftest.err)
-   ac_status=$?
-   cat out/conftest.err >&5
-   echo "$as_me:$LINENO: \$? = $ac_status" >&5
-   if (exit $ac_status) && test -s out/conftest2.$ac_objext
-   then
-     # The compiler can only warn and ignore the option if not recognized
-     # So say no if there are warnings
-     $ECHO "$_lt_compiler_boilerplate" | $SED '/^$/d' > out/conftest.exp
-     $SED '/^$/d; /^ *+/d' out/conftest.err >out/conftest.er2
-     if test ! -s out/conftest.er2 || diff out/conftest.exp out/conftest.er2 >/dev/null; then
-       lt_cv_prog_compiler_c_o=yes
-     fi
-   fi
-   chmod u+w . 2>&5
-   $RM conftest*
-   # SGI C++ compiler will create directory out/ii_files/ for
-   # template instantiation
-   test -d out/ii_files && $RM out/ii_files/* && rmdir out/ii_files
-   $RM out/* && rmdir out
-   cd ..
-   $RM -r conftest
-   $RM conftest*
-
-fi
-{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $lt_cv_prog_compiler_c_o" >&5
-$as_echo "$lt_cv_prog_compiler_c_o" >&6; }
-
-
-
-
-hard_links="nottested"
-if test "$lt_cv_prog_compiler_c_o" = no && test "$need_locks" != no; then
-  # do not overwrite the value of need_locks provided by the user
-  { $as_echo "$as_me:${as_lineno-$LINENO}: checking if we can lock with hard links" >&5
-$as_echo_n "checking if we can lock with hard links... " >&6; }
-  hard_links=yes
-  $RM conftest*
-  ln conftest.a conftest.b 2>/dev/null && hard_links=no
-  touch conftest.a
-  ln conftest.a conftest.b 2>&5 || hard_links=no
-  ln conftest.a conftest.b 2>/dev/null && hard_links=no
-  { $as_echo "$as_me:${as_lineno-$LINENO}: result: $hard_links" >&5
-$as_echo "$hard_links" >&6; }
-  if test "$hard_links" = no; then
-    { $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: \`$CC' does not support \`-c -o', so \`make -j' may be unsafe" >&5
-$as_echo "$as_me: WARNING: \`$CC' does not support \`-c -o', so \`make -j' may be unsafe" >&2;}
-    need_locks=warn
-  fi
-else
-  need_locks=no
-fi
-
-
-
-
-
-
-  { $as_echo "$as_me:${as_lineno-$LINENO}: checking whether the $compiler linker ($LD) supports shared libraries" >&5
-$as_echo_n "checking whether the $compiler linker ($LD) supports shared libraries... " >&6; }
-
-  runpath_var=
-  allow_undefined_flag=
-  always_export_symbols=no
-  archive_cmds=
-  archive_expsym_cmds=
-  compiler_needs_object=no
-  enable_shared_with_static_runtimes=no
-  export_dynamic_flag_spec=
-  export_symbols_cmds='$NM $libobjs $convenience | $global_symbol_pipe | $SED '\''s/.* //'\'' | sort | uniq > $export_symbols'
-  hardcode_automatic=no
-  hardcode_direct=no
-  hardcode_direct_absolute=no
-  hardcode_libdir_flag_spec=
-  hardcode_libdir_separator=
-  hardcode_minus_L=no
-  hardcode_shlibpath_var=unsupported
-  inherit_rpath=no
-  link_all_deplibs=unknown
-  module_cmds=
-  module_expsym_cmds=
-  old_archive_from_new_cmds=
-  old_archive_from_expsyms_cmds=
-  thread_safe_flag_spec=
-  whole_archive_flag_spec=
-  # include_expsyms should be a list of space-separated symbols to be *always*
-  # included in the symbol list
-  include_expsyms=
-  # exclude_expsyms can be an extended regexp of symbols to exclude
-  # it will be wrapped by ` (' and `)$', so one must not match beginning or
-  # end of line.  Example: `a|bc|.*d.*' will exclude the symbols `a' and `bc',
-  # as well as any symbol that contains `d'.
-  exclude_expsyms='_GLOBAL_OFFSET_TABLE_|_GLOBAL__F[ID]_.*'
-  # Although _GLOBAL_OFFSET_TABLE_ is a valid symbol C name, most a.out
-  # platforms (ab)use it in PIC code, but their linkers get confused if
-  # the symbol is explicitly referenced.  Since portable code cannot
-  # rely on this symbol name, it's probably fine to never include it in
-  # preloaded symbol tables.
-  # Exclude shared library initialization/finalization symbols.
-  extract_expsyms_cmds=
-
-  case $host_os in
-  cygwin* | mingw* | pw32* | cegcc*)
-    # FIXME: the MSVC++ port hasn't been tested in a loooong time
-    # When not using gcc, we currently assume that we are using
-    # Microsoft Visual C++.
-    if test "$GCC" != yes; then
-      with_gnu_ld=no
-    fi
-    ;;
-  interix*)
-    # we just hope/assume this is gcc and not c89 (= MSVC++)
-    with_gnu_ld=yes
-    ;;
-  openbsd*)
-    with_gnu_ld=no
-    ;;
-  esac
-
-  ld_shlibs=yes
-
-  # On some targets, GNU ld is compatible enough with the native linker
-  # that we're better off using the native interface for both.
-  lt_use_gnu_ld_interface=no
-  if test "$with_gnu_ld" = yes; then
-    case $host_os in
-      aix*)
-	# The AIX port of GNU ld has always aspired to compatibility
-	# with the native linker.  However, as the warning in the GNU ld
-	# block says, versions before 2.19.5* couldn't really create working
-	# shared libraries, regardless of the interface used.
-	case `$LD -v 2>&1` in
-	  *\ \(GNU\ Binutils\)\ 2.19.5*) ;;
-	  *\ \(GNU\ Binutils\)\ 2.[2-9]*) ;;
-	  *\ \(GNU\ Binutils\)\ [3-9]*) ;;
-	  *)
-	    lt_use_gnu_ld_interface=yes
-	    ;;
-	esac
-	;;
-      *)
-	lt_use_gnu_ld_interface=yes
-	;;
-    esac
-  fi
-
-  if test "$lt_use_gnu_ld_interface" = yes; then
-    # If archive_cmds runs LD, not CC, wlarc should be empty
-    wlarc='${wl}'
-
-    # Set some defaults for GNU ld with shared library support. These
-    # are reset later if shared libraries are not supported. Putting them
-    # here allows them to be overridden if necessary.
-    runpath_var=LD_RUN_PATH
-    hardcode_libdir_flag_spec='${wl}-rpath ${wl}$libdir'
-    export_dynamic_flag_spec='${wl}--export-dynamic'
-    # ancient GNU ld didn't support --whole-archive et. al.
-    if $LD --help 2>&1 | $GREP 'no-whole-archive' > /dev/null; then
-      whole_archive_flag_spec="$wlarc"'--whole-archive$convenience '"$wlarc"'--no-whole-archive'
-    else
-      whole_archive_flag_spec=
-    fi
-    supports_anon_versioning=no
-    case `$LD -v 2>&1` in
-      *GNU\ gold*) supports_anon_versioning=yes ;;
-      *\ [01].* | *\ 2.[0-9].* | *\ 2.10.*) ;; # catch versions < 2.11
-      *\ 2.11.93.0.2\ *) supports_anon_versioning=yes ;; # RH7.3 ...
-      *\ 2.11.92.0.12\ *) supports_anon_versioning=yes ;; # Mandrake 8.2 ...
-      *\ 2.11.*) ;; # other 2.11 versions
-      *) supports_anon_versioning=yes ;;
-    esac
-
-    # See if GNU ld supports shared libraries.
-    case $host_os in
-    aix[3-9]*)
-      # On AIX/PPC, the GNU linker is very broken
-      if test "$host_cpu" != ia64; then
-	ld_shlibs=no
-	cat <<_LT_EOF 1>&2
-
-*** Warning: the GNU linker, at least up to release 2.19, is reported
-*** to be unable to reliably create shared libraries on AIX.
-*** Therefore, libtool is disabling shared libraries support.  If you
-*** really care for shared libraries, you may want to install binutils
-*** 2.20 or above, or modify your PATH so that a non-GNU linker is found.
-*** You will then need to restart the configuration process.
-
-_LT_EOF
-      fi
-      ;;
-
-    amigaos*)
-      case $host_cpu in
-      powerpc)
-            # see comment about AmigaOS4 .so support
-            archive_cmds='$CC -shared $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname -o $lib'
-            archive_expsym_cmds=''
-        ;;
-      m68k)
-            archive_cmds='$RM $output_objdir/a2ixlibrary.data~$ECHO "#define NAME $libname" > $output_objdir/a2ixlibrary.data~$ECHO "#define LIBRARY_ID 1" >> $output_objdir/a2ixlibrary.data~$ECHO "#define VERSION $major" >> $output_objdir/a2ixlibrary.data~$ECHO "#define REVISION $revision" >> $output_objdir/a2ixlibrary.data~$AR $AR_FLAGS $lib $libobjs~$RANLIB $lib~(cd $output_objdir && a2ixlibrary -32)'
-            hardcode_libdir_flag_spec='-L$libdir'
-            hardcode_minus_L=yes
-        ;;
-      esac
-      ;;
-
-    beos*)
-      if $LD --help 2>&1 | $GREP ': supported targets:.* elf' > /dev/null; then
-	allow_undefined_flag=unsupported
-	# Joseph Beckenbach <jrb3@best.com> says some releases of gcc
-	# support --undefined.  This deserves some investigation.  FIXME
-	archive_cmds='$CC -nostart $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname -o $lib'
-      else
-	ld_shlibs=no
-      fi
-      ;;
-
-    cygwin* | mingw* | pw32* | cegcc*)
-      # _LT_TAGVAR(hardcode_libdir_flag_spec, ) is actually meaningless,
-      # as there is no search path for DLLs.
-      hardcode_libdir_flag_spec='-L$libdir'
-      export_dynamic_flag_spec='${wl}--export-all-symbols'
-      allow_undefined_flag=unsupported
-      always_export_symbols=no
-      enable_shared_with_static_runtimes=yes
-      export_symbols_cmds='$NM $libobjs $convenience | $global_symbol_pipe | $SED -e '\''/^[BCDGRS][ ]/s/.*[ ]\([^ ]*\)/\1 DATA/;s/^.*[ ]__nm__\([^ ]*\)[ ][^ ]*/\1 DATA/;/^I[ ]/d;/^[AITW][ ]/s/.* //'\'' | sort | uniq > $export_symbols'
-      exclude_expsyms='[_]+GLOBAL_OFFSET_TABLE_|[_]+GLOBAL__[FID]_.*|[_]+head_[A-Za-z0-9_]+_dll|[A-Za-z0-9_]+_dll_iname'
-
-      if $LD --help 2>&1 | $GREP 'auto-import' > /dev/null; then
-        archive_cmds='$CC -shared $libobjs $deplibs $compiler_flags -o $output_objdir/$soname ${wl}--enable-auto-image-base -Xlinker --out-implib -Xlinker $lib'
-	# If the export-symbols file already is a .def file (1st line
-	# is EXPORTS), use it as is; otherwise, prepend...
-	archive_expsym_cmds='if test "x`$SED 1q $export_symbols`" = xEXPORTS; then
-	  cp $export_symbols $output_objdir/$soname.def;
-	else
-	  echo EXPORTS > $output_objdir/$soname.def;
-	  cat $export_symbols >> $output_objdir/$soname.def;
-	fi~
-	$CC -shared $output_objdir/$soname.def $libobjs $deplibs $compiler_flags -o $output_objdir/$soname ${wl}--enable-auto-image-base -Xlinker --out-implib -Xlinker $lib'
-      else
-	ld_shlibs=no
-      fi
-      ;;
-
-    haiku*)
-      archive_cmds='$CC -shared $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname -o $lib'
-      link_all_deplibs=yes
-      ;;
-
-    interix[3-9]*)
-      hardcode_direct=no
-      hardcode_shlibpath_var=no
-      hardcode_libdir_flag_spec='${wl}-rpath,$libdir'
-      export_dynamic_flag_spec='${wl}-E'
-      # Hack: On Interix 3.x, we cannot compile PIC because of a broken gcc.
-      # Instead, shared libraries are loaded at an image base (0x10000000 by
-      # default) and relocated if they conflict, which is a slow very memory
-      # consuming and fragmenting process.  To avoid this, we pick a random,
-      # 256 KiB-aligned image base between 0x50000000 and 0x6FFC0000 at link
-      # time.  Moving up from 0x10000000 also allows more sbrk(2) space.
-      archive_cmds='$CC -shared $pic_flag $libobjs $deplibs $compiler_flags ${wl}-h,$soname ${wl}--image-base,`expr ${RANDOM-$$} % 4096 / 2 \* 262144 + 1342177280` -o $lib'
-      archive_expsym_cmds='sed "s,^,_," $export_symbols >$output_objdir/$soname.expsym~$CC -shared $pic_flag $libobjs $deplibs $compiler_flags ${wl}-h,$soname ${wl}--retain-symbols-file,$output_objdir/$soname.expsym ${wl}--image-base,`expr ${RANDOM-$$} % 4096 / 2 \* 262144 + 1342177280` -o $lib'
-      ;;
-
-    gnu* | linux* | tpf* | k*bsd*-gnu | kopensolaris*-gnu)
-      tmp_diet=no
-      if test "$host_os" = linux-dietlibc; then
-	case $cc_basename in
-	  diet\ *) tmp_diet=yes;;	# linux-dietlibc with static linking (!diet-dyn)
-	esac
-      fi
-      if $LD --help 2>&1 | $EGREP ': supported targets:.* elf' > /dev/null \
-	 && test "$tmp_diet" = no
-      then
-	tmp_addflag=' $pic_flag'
-	tmp_sharedflag='-shared'
-	case $cc_basename,$host_cpu in
-        pgcc*)				# Portland Group C compiler
-	  whole_archive_flag_spec='${wl}--whole-archive`for conv in $convenience\"\"; do test  -n \"$conv\" && new_convenience=\"$new_convenience,$conv\"; done; func_echo_all \"$new_convenience\"` ${wl}--no-whole-archive'
-	  tmp_addflag=' $pic_flag'
-	  ;;
-	pgf77* | pgf90* | pgf95* | pgfortran*)
-					# Portland Group f77 and f90 compilers
-	  whole_archive_flag_spec='${wl}--whole-archive`for conv in $convenience\"\"; do test  -n \"$conv\" && new_convenience=\"$new_convenience,$conv\"; done; func_echo_all \"$new_convenience\"` ${wl}--no-whole-archive'
-	  tmp_addflag=' $pic_flag -Mnomain' ;;
-	ecc*,ia64* | icc*,ia64*)	# Intel C compiler on ia64
-	  tmp_addflag=' -i_dynamic' ;;
-	efc*,ia64* | ifort*,ia64*)	# Intel Fortran compiler on ia64
-	  tmp_addflag=' -i_dynamic -nofor_main' ;;
-	ifc* | ifort*)			# Intel Fortran compiler
-	  tmp_addflag=' -nofor_main' ;;
-	lf95*)				# Lahey Fortran 8.1
-	  whole_archive_flag_spec=
-	  tmp_sharedflag='--shared' ;;
-	xl[cC]* | bgxl[cC]* | mpixl[cC]*) # IBM XL C 8.0 on PPC (deal with xlf below)
-	  tmp_sharedflag='-qmkshrobj'
-	  tmp_addflag= ;;
-	nvcc*)	# Cuda Compiler Driver 2.2
-	  whole_archive_flag_spec='${wl}--whole-archive`for conv in $convenience\"\"; do test  -n \"$conv\" && new_convenience=\"$new_convenience,$conv\"; done; func_echo_all \"$new_convenience\"` ${wl}--no-whole-archive'
-	  compiler_needs_object=yes
-	  ;;
-	esac
-	case `$CC -V 2>&1 | sed 5q` in
-	*Sun\ C*)			# Sun C 5.9
-	  whole_archive_flag_spec='${wl}--whole-archive`new_convenience=; for conv in $convenience\"\"; do test -z \"$conv\" || new_convenience=\"$new_convenience,$conv\"; done; func_echo_all \"$new_convenience\"` ${wl}--no-whole-archive'
-	  compiler_needs_object=yes
-	  tmp_sharedflag='-G' ;;
-	*Sun\ F*)			# Sun Fortran 8.3
-	  tmp_sharedflag='-G' ;;
-	esac
-	archive_cmds='$CC '"$tmp_sharedflag""$tmp_addflag"' $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname -o $lib'
-
-        if test "x$supports_anon_versioning" = xyes; then
-          archive_expsym_cmds='echo "{ global:" > $output_objdir/$libname.ver~
-	    cat $export_symbols | sed -e "s/\(.*\)/\1;/" >> $output_objdir/$libname.ver~
-	    echo "local: *; };" >> $output_objdir/$libname.ver~
-	    $CC '"$tmp_sharedflag""$tmp_addflag"' $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname ${wl}-version-script ${wl}$output_objdir/$libname.ver -o $lib'
-        fi
-
-	case $cc_basename in
-	xlf* | bgf* | bgxlf* | mpixlf*)
-	  # IBM XL Fortran 10.1 on PPC cannot create shared libs itself
-	  whole_archive_flag_spec='--whole-archive$convenience --no-whole-archive'
-	  hardcode_libdir_flag_spec='${wl}-rpath ${wl}$libdir'
-	  archive_cmds='$LD -shared $libobjs $deplibs $linker_flags -soname $soname -o $lib'
-	  if test "x$supports_anon_versioning" = xyes; then
-	    archive_expsym_cmds='echo "{ global:" > $output_objdir/$libname.ver~
-	      cat $export_symbols | sed -e "s/\(.*\)/\1;/" >> $output_objdir/$libname.ver~
-	      echo "local: *; };" >> $output_objdir/$libname.ver~
-	      $LD -shared $libobjs $deplibs $linker_flags -soname $soname -version-script $output_objdir/$libname.ver -o $lib'
-	  fi
-	  ;;
-	esac
-      else
-        ld_shlibs=no
-      fi
-      ;;
-
-    netbsd*)
-      if echo __ELF__ | $CC -E - | $GREP __ELF__ >/dev/null; then
-	archive_cmds='$LD -Bshareable $libobjs $deplibs $linker_flags -o $lib'
-	wlarc=
-      else
-	archive_cmds='$CC -shared $pic_flag $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname -o $lib'
-	archive_expsym_cmds='$CC -shared $pic_flag $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname ${wl}-retain-symbols-file $wl$export_symbols -o $lib'
-      fi
-      ;;
-
-    solaris*)
-      if $LD -v 2>&1 | $GREP 'BFD 2\.8' > /dev/null; then
-	ld_shlibs=no
-	cat <<_LT_EOF 1>&2
-
-*** Warning: The releases 2.8.* of the GNU linker cannot reliably
-*** create shared libraries on Solaris systems.  Therefore, libtool
-*** is disabling shared libraries support.  We urge you to upgrade GNU
-*** binutils to release 2.9.1 or newer.  Another option is to modify
-*** your PATH or compiler configuration so that the native linker is
-*** used, and then restart.
-
-_LT_EOF
-      elif $LD --help 2>&1 | $GREP ': supported targets:.* elf' > /dev/null; then
-	archive_cmds='$CC -shared $pic_flag $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname -o $lib'
-	archive_expsym_cmds='$CC -shared $pic_flag $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname ${wl}-retain-symbols-file $wl$export_symbols -o $lib'
-      else
-	ld_shlibs=no
-      fi
-      ;;
-
-    sysv5* | sco3.2v5* | sco5v6* | unixware* | OpenUNIX*)
-      case `$LD -v 2>&1` in
-        *\ [01].* | *\ 2.[0-9].* | *\ 2.1[0-5].*)
-	ld_shlibs=no
-	cat <<_LT_EOF 1>&2
-
-*** Warning: Releases of the GNU linker prior to 2.16.91.0.3 can not
-*** reliably create shared libraries on SCO systems.  Therefore, libtool
-*** is disabling shared libraries support.  We urge you to upgrade GNU
-*** binutils to release 2.16.91.0.3 or newer.  Another option is to modify
-*** your PATH or compiler configuration so that the native linker is
-*** used, and then restart.
-
-_LT_EOF
-	;;
-	*)
-	  # For security reasons, it is highly recommended that you always
-	  # use absolute paths for naming shared libraries, and exclude the
-	  # DT_RUNPATH tag from executables and libraries.  But doing so
-	  # requires that you compile everything twice, which is a pain.
-	  if $LD --help 2>&1 | $GREP ': supported targets:.* elf' > /dev/null; then
-	    hardcode_libdir_flag_spec='${wl}-rpath ${wl}$libdir'
-	    archive_cmds='$CC -shared $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname -o $lib'
-	    archive_expsym_cmds='$CC -shared $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname ${wl}-retain-symbols-file $wl$export_symbols -o $lib'
-	  else
-	    ld_shlibs=no
-	  fi
-	;;
-      esac
-      ;;
-
-    sunos4*)
-      archive_cmds='$LD -assert pure-text -Bshareable -o $lib $libobjs $deplibs $linker_flags'
-      wlarc=
-      hardcode_direct=yes
-      hardcode_shlibpath_var=no
-      ;;
-
-    *)
-      if $LD --help 2>&1 | $GREP ': supported targets:.* elf' > /dev/null; then
-	archive_cmds='$CC -shared $pic_flag $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname -o $lib'
-	archive_expsym_cmds='$CC -shared $pic_flag $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname ${wl}-retain-symbols-file $wl$export_symbols -o $lib'
-      else
-	ld_shlibs=no
-      fi
-      ;;
-    esac
-
-    if test "$ld_shlibs" = no; then
-      runpath_var=
-      hardcode_libdir_flag_spec=
-      export_dynamic_flag_spec=
-      whole_archive_flag_spec=
-    fi
-  else
-    # PORTME fill in a description of your system's linker (not GNU ld)
-    case $host_os in
-    aix3*)
-      allow_undefined_flag=unsupported
-      always_export_symbols=yes
-      archive_expsym_cmds='$LD -o $output_objdir/$soname $libobjs $deplibs $linker_flags -bE:$export_symbols -T512 -H512 -bM:SRE~$AR $AR_FLAGS $lib $output_objdir/$soname'
-      # Note: this linker hardcodes the directories in LIBPATH if there
-      # are no directories specified by -L.
-      hardcode_minus_L=yes
-      if test "$GCC" = yes && test -z "$lt_prog_compiler_static"; then
-	# Neither direct hardcoding nor static linking is supported with a
-	# broken collect2.
-	hardcode_direct=unsupported
-      fi
-      ;;
-
-    aix[4-9]*)
-      if test "$host_cpu" = ia64; then
-	# On IA64, the linker does run time linking by default, so we don't
-	# have to do anything special.
-	aix_use_runtimelinking=no
-	exp_sym_flag='-Bexport'
-	no_entry_flag=""
-      else
-	# If we're using GNU nm, then we don't want the "-C" option.
-	# -C means demangle to AIX nm, but means don't demangle with GNU nm
-	# Also, AIX nm treats weak defined symbols like other global
-	# defined symbols, whereas GNU nm marks them as "W".
-	if $NM -V 2>&1 | $GREP 'GNU' > /dev/null; then
-	  export_symbols_cmds='$NM -Bpg $libobjs $convenience | awk '\''{ if (((\$ 2 == "T") || (\$ 2 == "D") || (\$ 2 == "B") || (\$ 2 == "W")) && (substr(\$ 3,1,1) != ".")) { print \$ 3 } }'\'' | sort -u > $export_symbols'
-	else
-	  export_symbols_cmds='$NM -BCpg $libobjs $convenience | awk '\''{ if (((\$ 2 == "T") || (\$ 2 == "D") || (\$ 2 == "B")) && (substr(\$ 3,1,1) != ".")) { print \$ 3 } }'\'' | sort -u > $export_symbols'
-	fi
-	aix_use_runtimelinking=no
-
-	# Test if we are trying to use run time linking or normal
-	# AIX style linking. If -brtl is somewhere in LDFLAGS, we
-	# need to do runtime linking.
-	case $host_os in aix4.[23]|aix4.[23].*|aix[5-9]*)
-	  for ld_flag in $LDFLAGS; do
-	  if (test $ld_flag = "-brtl" || test $ld_flag = "-Wl,-brtl"); then
-	    aix_use_runtimelinking=yes
-	    break
-	  fi
-	  done
-	  ;;
-	esac
-
-	exp_sym_flag='-bexport'
-	no_entry_flag='-bnoentry'
-      fi
-
-      # When large executables or shared objects are built, AIX ld can
-      # have problems creating the table of contents.  If linking a library
-      # or program results in "error TOC overflow" add -mminimal-toc to
-      # CXXFLAGS/CFLAGS for g++/gcc.  In the cases where that is not
-      # enough to fix the problem, add -Wl,-bbigtoc to LDFLAGS.
-
-      archive_cmds=''
-      hardcode_direct=yes
-      hardcode_direct_absolute=yes
-      hardcode_libdir_separator=':'
-      link_all_deplibs=yes
-      file_list_spec='${wl}-f,'
-
-      if test "$GCC" = yes; then
-	case $host_os in aix4.[012]|aix4.[012].*)
-	# We only want to do this on AIX 4.2 and lower, the check
-	# below for broken collect2 doesn't work under 4.3+
-	  collect2name=`${CC} -print-prog-name=collect2`
-	  if test -f "$collect2name" &&
-	   strings "$collect2name" | $GREP resolve_lib_name >/dev/null
-	  then
-	  # We have reworked collect2
-	  :
-	  else
-	  # We have old collect2
-	  hardcode_direct=unsupported
-	  # It fails to find uninstalled libraries when the uninstalled
-	  # path is not listed in the libpath.  Setting hardcode_minus_L
-	  # to unsupported forces relinking
-	  hardcode_minus_L=yes
-	  hardcode_libdir_flag_spec='-L$libdir'
-	  hardcode_libdir_separator=
-	  fi
-	  ;;
-	esac
-	shared_flag='-shared'
-	if test "$aix_use_runtimelinking" = yes; then
-	  shared_flag="$shared_flag "'${wl}-G'
-	fi
-      else
-	# not using gcc
-	if test "$host_cpu" = ia64; then
-	# VisualAge C++, Version 5.5 for AIX 5L for IA-64, Beta 3 Release
-	# chokes on -Wl,-G. The following line is correct:
-	  shared_flag='-G'
-	else
-	  if test "$aix_use_runtimelinking" = yes; then
-	    shared_flag='${wl}-G'
-	  else
-	    shared_flag='${wl}-bM:SRE'
-	  fi
-	fi
-      fi
-
-      export_dynamic_flag_spec='${wl}-bexpall'
-      # It seems that -bexpall does not export symbols beginning with
-      # underscore (_), so it is better to generate a list of symbols to export.
-      always_export_symbols=yes
-      if test "$aix_use_runtimelinking" = yes; then
-	# Warning - without using the other runtime loading flags (-brtl),
-	# -berok will link without error, but may produce a broken library.
-	allow_undefined_flag='-berok'
-        # Determine the default libpath from the value encoded in an
-        # empty executable.
-        if test "${lt_cv_aix_libpath+set}" = set; then
-  aix_libpath=$lt_cv_aix_libpath
-else
-  if ${lt_cv_aix_libpath_+:} false; then :
-  $as_echo_n "(cached) " >&6
-else
-  cat confdefs.h - <<_ACEOF >conftest.$ac_ext
-/* end confdefs.h.  */
-
-int
-main ()
-{
-
-  ;
-  return 0;
-}
-_ACEOF
-if ac_fn_c_try_link "$LINENO"; then :
-
-  lt_aix_libpath_sed='
-      /Import File Strings/,/^$/ {
-	  /^0/ {
-	      s/^0  *\([^ ]*\) *$/\1/
-	      p
-	  }
-      }'
-  lt_cv_aix_libpath_=`dump -H conftest$ac_exeext 2>/dev/null | $SED -n -e "$lt_aix_libpath_sed"`
-  # Check for a 64-bit object if we didn't find anything.
-  if test -z "$lt_cv_aix_libpath_"; then
-    lt_cv_aix_libpath_=`dump -HX64 conftest$ac_exeext 2>/dev/null | $SED -n -e "$lt_aix_libpath_sed"`
-  fi
-fi
-rm -f core conftest.err conftest.$ac_objext \
-    conftest$ac_exeext conftest.$ac_ext
-  if test -z "$lt_cv_aix_libpath_"; then
-    lt_cv_aix_libpath_="/usr/lib:/lib"
-  fi
-
-fi
-
-  aix_libpath=$lt_cv_aix_libpath_
-fi
-
-        hardcode_libdir_flag_spec='${wl}-blibpath:$libdir:'"$aix_libpath"
-        archive_expsym_cmds='$CC -o $output_objdir/$soname $libobjs $deplibs '"\${wl}$no_entry_flag"' $compiler_flags `if test "x${allow_undefined_flag}" != "x"; then func_echo_all "${wl}${allow_undefined_flag}"; else :; fi` '"\${wl}$exp_sym_flag:\$export_symbols $shared_flag"
-      else
-	if test "$host_cpu" = ia64; then
-	  hardcode_libdir_flag_spec='${wl}-R $libdir:/usr/lib:/lib'
-	  allow_undefined_flag="-z nodefs"
-	  archive_expsym_cmds="\$CC $shared_flag"' -o $output_objdir/$soname $libobjs $deplibs '"\${wl}$no_entry_flag"' $compiler_flags ${wl}${allow_undefined_flag} '"\${wl}$exp_sym_flag:\$export_symbols"
-	else
-	 # Determine the default libpath from the value encoded in an
-	 # empty executable.
-	 if test "${lt_cv_aix_libpath+set}" = set; then
-  aix_libpath=$lt_cv_aix_libpath
-else
-  if ${lt_cv_aix_libpath_+:} false; then :
-  $as_echo_n "(cached) " >&6
-else
-  cat confdefs.h - <<_ACEOF >conftest.$ac_ext
-/* end confdefs.h.  */
-
-int
-main ()
-{
-
-  ;
-  return 0;
-}
-_ACEOF
-if ac_fn_c_try_link "$LINENO"; then :
-
-  lt_aix_libpath_sed='
-      /Import File Strings/,/^$/ {
-	  /^0/ {
-	      s/^0  *\([^ ]*\) *$/\1/
-	      p
-	  }
-      }'
-  lt_cv_aix_libpath_=`dump -H conftest$ac_exeext 2>/dev/null | $SED -n -e "$lt_aix_libpath_sed"`
-  # Check for a 64-bit object if we didn't find anything.
-  if test -z "$lt_cv_aix_libpath_"; then
-    lt_cv_aix_libpath_=`dump -HX64 conftest$ac_exeext 2>/dev/null | $SED -n -e "$lt_aix_libpath_sed"`
-  fi
-fi
-rm -f core conftest.err conftest.$ac_objext \
-    conftest$ac_exeext conftest.$ac_ext
-  if test -z "$lt_cv_aix_libpath_"; then
-    lt_cv_aix_libpath_="/usr/lib:/lib"
-  fi
-
-fi
-
-  aix_libpath=$lt_cv_aix_libpath_
-fi
-
-	 hardcode_libdir_flag_spec='${wl}-blibpath:$libdir:'"$aix_libpath"
-	  # Warning - without using the other run time loading flags,
-	  # -berok will link without error, but may produce a broken library.
-	  no_undefined_flag=' ${wl}-bernotok'
-	  allow_undefined_flag=' ${wl}-berok'
-	  if test "$with_gnu_ld" = yes; then
-	    # We only use this code for GNU lds that support --whole-archive.
-	    whole_archive_flag_spec='${wl}--whole-archive$convenience ${wl}--no-whole-archive'
-	  else
-	    # Exported symbols can be pulled into shared objects from archives
-	    whole_archive_flag_spec='$convenience'
-	  fi
-	  archive_cmds_need_lc=yes
-	  # This is similar to how AIX traditionally builds its shared libraries.
-	  archive_expsym_cmds="\$CC $shared_flag"' -o $output_objdir/$soname $libobjs $deplibs ${wl}-bnoentry $compiler_flags ${wl}-bE:$export_symbols${allow_undefined_flag}~$AR $AR_FLAGS $output_objdir/$libname$release.a $output_objdir/$soname'
-	fi
-      fi
-      ;;
-
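The AIX branch above derives the default libpath by linking an empty program and scraping the loader section with dump(1). A minimal standalone sketch of the same probe, assuming an AIX host where dump and sed behave as the script expects:

    # Sketch: recover the default libpath the AIX linker encodes into a binary.
    echo 'int main(void) { return 0; }' > conftest.c
    ${CC:-cc} -o conftest conftest.c
    # The first "Import File Strings" entry of the loader section is the libpath.
    aix_libpath=`dump -H conftest 2>/dev/null | sed -n '/Import File Strings/,/^$/{ /^0/s/^0  *\([^ ]*\) *$/\1/p; }'`
    test -z "$aix_libpath" && aix_libpath=/usr/lib:/lib
    rm -f conftest conftest.c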
-    amigaos*)
-      case $host_cpu in
-      powerpc)
-            # see comment about AmigaOS4 .so support
-            archive_cmds='$CC -shared $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname -o $lib'
-            archive_expsym_cmds=''
-        ;;
-      m68k)
-            archive_cmds='$RM $output_objdir/a2ixlibrary.data~$ECHO "#define NAME $libname" > $output_objdir/a2ixlibrary.data~$ECHO "#define LIBRARY_ID 1" >> $output_objdir/a2ixlibrary.data~$ECHO "#define VERSION $major" >> $output_objdir/a2ixlibrary.data~$ECHO "#define REVISION $revision" >> $output_objdir/a2ixlibrary.data~$AR $AR_FLAGS $lib $libobjs~$RANLIB $lib~(cd $output_objdir && a2ixlibrary -32)'
-            hardcode_libdir_flag_spec='-L$libdir'
-            hardcode_minus_L=yes
-        ;;
-      esac
-      ;;
-
-    bsdi[45]*)
-      export_dynamic_flag_spec=-rdynamic
-      ;;
-
-    cygwin* | mingw* | pw32* | cegcc*)
-      # When not using gcc, we currently assume that we are using
-      # Microsoft Visual C++.
-      # hardcode_libdir_flag_spec is actually meaningless, as there is
-      # no search path for DLLs.
-      case $cc_basename in
-      cl*)
-	# Native MSVC
-	hardcode_libdir_flag_spec=' '
-	allow_undefined_flag=unsupported
-	always_export_symbols=yes
-	file_list_spec='@'
-	# Tell ltmain to make .lib files, not .a files.
-	libext=lib
-	# Tell ltmain to make .dll files, not .so files.
-	shrext_cmds=".dll"
-	# FIXME: Setting linknames here is a bad hack.
-	archive_cmds='$CC -o $output_objdir/$soname $libobjs $compiler_flags $deplibs -Wl,-dll~linknames='
-	archive_expsym_cmds='if test "x`$SED 1q $export_symbols`" = xEXPORTS; then
-	    sed -n -e 's/\\\\\\\(.*\\\\\\\)/-link\\\ -EXPORT:\\\\\\\1/' -e '1\\\!p' < $export_symbols > $output_objdir/$soname.exp;
-	  else
-	    sed -e 's/\\\\\\\(.*\\\\\\\)/-link\\\ -EXPORT:\\\\\\\1/' < $export_symbols > $output_objdir/$soname.exp;
-	  fi~
-	  $CC -o $tool_output_objdir$soname $libobjs $compiler_flags $deplibs "@$tool_output_objdir$soname.exp" -Wl,-DLL,-IMPLIB:"$tool_output_objdir$libname.dll.lib"~
-	  linknames='
-	# The linker will not automatically build a static lib if we build a DLL.
-	# _LT_TAGVAR(old_archive_from_new_cmds, )='true'
-	enable_shared_with_static_runtimes=yes
-	exclude_expsyms='_NULL_IMPORT_DESCRIPTOR|_IMPORT_DESCRIPTOR_.*'
-	export_symbols_cmds='$NM $libobjs $convenience | $global_symbol_pipe | $SED -e '\''/^[BCDGRS][ ]/s/.*[ ]\([^ ]*\)/\1,DATA/'\'' | $SED -e '\''/^[AITW][ ]/s/.*[ ]//'\'' | sort | uniq > $export_symbols'
-	# Don't use ranlib
-	old_postinstall_cmds='chmod 644 $oldlib'
-	postlink_cmds='lt_outputfile="@OUTPUT@"~
-	  lt_tool_outputfile="@TOOL_OUTPUT@"~
-	  case $lt_outputfile in
-	    *.exe|*.EXE) ;;
-	    *)
-	      lt_outputfile="$lt_outputfile.exe"
-	      lt_tool_outputfile="$lt_tool_outputfile.exe"
-	      ;;
-	  esac~
-	  if test "$MANIFEST_TOOL" != ":" && test -f "$lt_outputfile.manifest"; then
-	    $MANIFEST_TOOL -manifest "$lt_tool_outputfile.manifest" -outputresource:"$lt_tool_outputfile" || exit 1;
-	    $RM "$lt_outputfile.manifest";
-	  fi'
-	;;
-      *)
-	# Assume MSVC wrapper
-	hardcode_libdir_flag_spec=' '
-	allow_undefined_flag=unsupported
-	# Tell ltmain to make .lib files, not .a files.
-	libext=lib
-	# Tell ltmain to make .dll files, not .so files.
-	shrext_cmds=".dll"
-	# FIXME: Setting linknames here is a bad hack.
-	archive_cmds='$CC -o $lib $libobjs $compiler_flags `func_echo_all "$deplibs" | $SED '\''s/ -lc$//'\''` -link -dll~linknames='
-	# The linker will automatically build a .lib file if we build a DLL.
-	old_archive_from_new_cmds='true'
-	# FIXME: Should let the user specify the lib program.
-	old_archive_cmds='lib -OUT:$oldlib$oldobjs$old_deplibs'
-	enable_shared_with_static_runtimes=yes
-	;;
-      esac
-      ;;
-
-    darwin* | rhapsody*)
-
-
-  archive_cmds_need_lc=no
-  hardcode_direct=no
-  hardcode_automatic=yes
-  hardcode_shlibpath_var=unsupported
-  if test "$lt_cv_ld_force_load" = "yes"; then
-    whole_archive_flag_spec='`for conv in $convenience\"\"; do test  -n \"$conv\" && new_convenience=\"$new_convenience ${wl}-force_load,$conv\"; done; func_echo_all \"$new_convenience\"`'
-
-  else
-    whole_archive_flag_spec=''
-  fi
-  link_all_deplibs=yes
-  allow_undefined_flag="$_lt_dar_allow_undefined"
-  case $cc_basename in
-     ifort*) _lt_dar_can_shared=yes ;;
-     *) _lt_dar_can_shared=$GCC ;;
-  esac
-  if test "$_lt_dar_can_shared" = "yes"; then
-    output_verbose_link_cmd=func_echo_all
-    archive_cmds="\$CC -dynamiclib \$allow_undefined_flag -o \$lib \$libobjs \$deplibs \$compiler_flags -install_name \$rpath/\$soname \$verstring $_lt_dar_single_mod${_lt_dsymutil}"
-    module_cmds="\$CC \$allow_undefined_flag -o \$lib -bundle \$libobjs \$deplibs \$compiler_flags${_lt_dsymutil}"
-    archive_expsym_cmds="sed 's,^,_,' < \$export_symbols > \$output_objdir/\${libname}-symbols.expsym~\$CC -dynamiclib \$allow_undefined_flag -o \$lib \$libobjs \$deplibs \$compiler_flags -install_name \$rpath/\$soname \$verstring ${_lt_dar_single_mod}${_lt_dar_export_syms}${_lt_dsymutil}"
-    module_expsym_cmds="sed -e 's,^,_,' < \$export_symbols > \$output_objdir/\${libname}-symbols.expsym~\$CC \$allow_undefined_flag -o \$lib -bundle \$libobjs \$deplibs \$compiler_flags${_lt_dar_export_syms}${_lt_dsymutil}"
-
-  else
-  ld_shlibs=no
-  fi
-
-      ;;
-
-    dgux*)
-      archive_cmds='$LD -G -h $soname -o $lib $libobjs $deplibs $linker_flags'
-      hardcode_libdir_flag_spec='-L$libdir'
-      hardcode_shlibpath_var=no
-      ;;
-
-    # FreeBSD 2.2.[012] allows us to include c++rt0.o to get C++ constructor
-    # support.  Future versions do this automatically, but an explicit c++rt0.o
-    # does not break anything, and helps significantly (at the cost of a little
-    # extra space).
-    freebsd2.2*)
-      archive_cmds='$LD -Bshareable -o $lib $libobjs $deplibs $linker_flags /usr/lib/c++rt0.o'
-      hardcode_libdir_flag_spec='-R$libdir'
-      hardcode_direct=yes
-      hardcode_shlibpath_var=no
-      ;;
-
-    # Unfortunately, older versions of FreeBSD 2 do not have this feature.
-    freebsd2.*)
-      archive_cmds='$LD -Bshareable -o $lib $libobjs $deplibs $linker_flags'
-      hardcode_direct=yes
-      hardcode_minus_L=yes
-      hardcode_shlibpath_var=no
-      ;;
-
-    # FreeBSD 3 and greater uses gcc -shared to do shared libraries.
-    freebsd* | dragonfly*)
-      archive_cmds='$CC -shared $pic_flag -o $lib $libobjs $deplibs $compiler_flags'
-      hardcode_libdir_flag_spec='-R$libdir'
-      hardcode_direct=yes
-      hardcode_shlibpath_var=no
-      ;;
-
-    hpux9*)
-      if test "$GCC" = yes; then
-	archive_cmds='$RM $output_objdir/$soname~$CC -shared $pic_flag ${wl}+b ${wl}$install_libdir -o $output_objdir/$soname $libobjs $deplibs $compiler_flags~test $output_objdir/$soname = $lib || mv $output_objdir/$soname $lib'
-      else
-	archive_cmds='$RM $output_objdir/$soname~$LD -b +b $install_libdir -o $output_objdir/$soname $libobjs $deplibs $linker_flags~test $output_objdir/$soname = $lib || mv $output_objdir/$soname $lib'
-      fi
-      hardcode_libdir_flag_spec='${wl}+b ${wl}$libdir'
-      hardcode_libdir_separator=:
-      hardcode_direct=yes
-
-      # hardcode_minus_L: Not really in the search PATH,
-      # but as the default location of the library.
-      hardcode_minus_L=yes
-      export_dynamic_flag_spec='${wl}-E'
-      ;;
-
-    hpux10*)
-      if test "$GCC" = yes && test "$with_gnu_ld" = no; then
-	archive_cmds='$CC -shared $pic_flag ${wl}+h ${wl}$soname ${wl}+b ${wl}$install_libdir -o $lib $libobjs $deplibs $compiler_flags'
-      else
-	archive_cmds='$LD -b +h $soname +b $install_libdir -o $lib $libobjs $deplibs $linker_flags'
-      fi
-      if test "$with_gnu_ld" = no; then
-	hardcode_libdir_flag_spec='${wl}+b ${wl}$libdir'
-	hardcode_libdir_separator=:
-	hardcode_direct=yes
-	hardcode_direct_absolute=yes
-	export_dynamic_flag_spec='${wl}-E'
-	# hardcode_minus_L: Not really in the search PATH,
-	# but as the default location of the library.
-	hardcode_minus_L=yes
-      fi
-      ;;
-
-    hpux11*)
-      if test "$GCC" = yes && test "$with_gnu_ld" = no; then
-	case $host_cpu in
-	hppa*64*)
-	  archive_cmds='$CC -shared ${wl}+h ${wl}$soname -o $lib $libobjs $deplibs $compiler_flags'
-	  ;;
-	ia64*)
-	  archive_cmds='$CC -shared $pic_flag ${wl}+h ${wl}$soname ${wl}+nodefaultrpath -o $lib $libobjs $deplibs $compiler_flags'
-	  ;;
-	*)
-	  archive_cmds='$CC -shared $pic_flag ${wl}+h ${wl}$soname ${wl}+b ${wl}$install_libdir -o $lib $libobjs $deplibs $compiler_flags'
-	  ;;
-	esac
-      else
-	case $host_cpu in
-	hppa*64*)
-	  archive_cmds='$CC -b ${wl}+h ${wl}$soname -o $lib $libobjs $deplibs $compiler_flags'
-	  ;;
-	ia64*)
-	  archive_cmds='$CC -b ${wl}+h ${wl}$soname ${wl}+nodefaultrpath -o $lib $libobjs $deplibs $compiler_flags'
-	  ;;
-	*)
-
-	  # Older versions of the 11.00 compiler do not understand -b yet
-	  # (HP92453-01 A.11.01.20 doesn't, HP92453-01 B.11.X.35175-35176.GP does)
-	  { $as_echo "$as_me:${as_lineno-$LINENO}: checking if $CC understands -b" >&5
-$as_echo_n "checking if $CC understands -b... " >&6; }
-if ${lt_cv_prog_compiler__b+:} false; then :
-  $as_echo_n "(cached) " >&6
-else
-  lt_cv_prog_compiler__b=no
-   save_LDFLAGS="$LDFLAGS"
-   LDFLAGS="$LDFLAGS -b"
-   echo "$lt_simple_link_test_code" > conftest.$ac_ext
-   if (eval $ac_link 2>conftest.err) && test -s conftest$ac_exeext; then
-     # The linker can only warn and ignore the option if it is not recognized,
-     # so say no if there are warnings.
-     if test -s conftest.err; then
-       # Append any errors to the config.log.
-       cat conftest.err 1>&5
-       $ECHO "$_lt_linker_boilerplate" | $SED '/^$/d' > conftest.exp
-       $SED '/^$/d; /^ *+/d' conftest.err >conftest.er2
-       if diff conftest.exp conftest.er2 >/dev/null; then
-         lt_cv_prog_compiler__b=yes
-       fi
-     else
-       lt_cv_prog_compiler__b=yes
-     fi
-   fi
-   $RM -r conftest*
-   LDFLAGS="$save_LDFLAGS"
-
-fi
-{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $lt_cv_prog_compiler__b" >&5
-$as_echo "$lt_cv_prog_compiler__b" >&6; }
-
-if test x"$lt_cv_prog_compiler__b" = xyes; then
-    archive_cmds='$CC -b ${wl}+h ${wl}$soname ${wl}+b ${wl}$install_libdir -o $lib $libobjs $deplibs $compiler_flags'
-else
-    archive_cmds='$LD -b +h $soname +b $install_libdir -o $lib $libobjs $deplibs $linker_flags'
-fi
-
-	  ;;
-	esac
-      fi
-      if test "$with_gnu_ld" = no; then
-	hardcode_libdir_flag_spec='${wl}+b ${wl}$libdir'
-	hardcode_libdir_separator=:
-
-	case $host_cpu in
-	hppa*64*|ia64*)
-	  hardcode_direct=no
-	  hardcode_shlibpath_var=no
-	  ;;
-	*)
-	  hardcode_direct=yes
-	  hardcode_direct_absolute=yes
-	  export_dynamic_flag_spec='${wl}-E'
-
-	  # hardcode_minus_L: Not really in the search PATH,
-	  # but as the default location of the library.
-	  hardcode_minus_L=yes
-	  ;;
-	esac
-      fi
-      ;;
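The cached -b test above follows the script's usual pattern for probing a linker flag: attempt a trivial link with the flag appended and treat any stderr output beyond the usual boilerplate as rejection. A simplified sketch of the idea (it skips the boilerplate diff the real test performs):

    # Sketch: does $CC accept -b? HP-UX linkers may only warn on unknown flags,
    # so any stderr output is treated as "no".
    echo 'int main(void) { return 0; }' > conftest.c
    if ${CC:-cc} -b -o conftest conftest.c 2>conftest.err && ! test -s conftest.err; then
      echo 'compiler understands -b'
    fi
    rm -f conftest conftest.c conftest.err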
-
-    irix5* | irix6* | nonstopux*)
-      if test "$GCC" = yes; then
-	archive_cmds='$CC -shared $pic_flag $libobjs $deplibs $compiler_flags ${wl}-soname ${wl}$soname `test -n "$verstring" && func_echo_all "${wl}-set_version ${wl}$verstring"` ${wl}-update_registry ${wl}${output_objdir}/so_locations -o $lib'
-	# Try to use the -exported_symbol ld option, if it does not
-	# work, assume that -exports_file does not work either and
-	# implicitly export all symbols.
-	# This should be the same for all languages, so no per-tag cache variable.
-	{ $as_echo "$as_me:${as_lineno-$LINENO}: checking whether the $host_os linker accepts -exported_symbol" >&5
-$as_echo_n "checking whether the $host_os linker accepts -exported_symbol... " >&6; }
-if ${lt_cv_irix_exported_symbol+:} false; then :
-  $as_echo_n "(cached) " >&6
-else
-  save_LDFLAGS="$LDFLAGS"
-	   LDFLAGS="$LDFLAGS -shared ${wl}-exported_symbol ${wl}foo ${wl}-update_registry ${wl}/dev/null"
-	   cat confdefs.h - <<_ACEOF >conftest.$ac_ext
-/* end confdefs.h.  */
-int foo (void) { return 0; }
-_ACEOF
-if ac_fn_c_try_link "$LINENO"; then :
-  lt_cv_irix_exported_symbol=yes
-else
-  lt_cv_irix_exported_symbol=no
-fi
-rm -f core conftest.err conftest.$ac_objext \
-    conftest$ac_exeext conftest.$ac_ext
-           LDFLAGS="$save_LDFLAGS"
-fi
-{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $lt_cv_irix_exported_symbol" >&5
-$as_echo "$lt_cv_irix_exported_symbol" >&6; }
-	if test "$lt_cv_irix_exported_symbol" = yes; then
-          archive_expsym_cmds='$CC -shared $pic_flag $libobjs $deplibs $compiler_flags ${wl}-soname ${wl}$soname `test -n "$verstring" && func_echo_all "${wl}-set_version ${wl}$verstring"` ${wl}-update_registry ${wl}${output_objdir}/so_locations ${wl}-exports_file ${wl}$export_symbols -o $lib'
-	fi
-      else
-	archive_cmds='$CC -shared $libobjs $deplibs $compiler_flags -soname $soname `test -n "$verstring" && func_echo_all "-set_version $verstring"` -update_registry ${output_objdir}/so_locations -o $lib'
-	archive_expsym_cmds='$CC -shared $libobjs $deplibs $compiler_flags -soname $soname `test -n "$verstring" && func_echo_all "-set_version $verstring"` -update_registry ${output_objdir}/so_locations -exports_file $export_symbols -o $lib'
-      fi
-      archive_cmds_need_lc='no'
-      hardcode_libdir_flag_spec='${wl}-rpath ${wl}$libdir'
-      hardcode_libdir_separator=:
-      inherit_rpath=yes
-      link_all_deplibs=yes
-      ;;
-
-    netbsd*)
-      if echo __ELF__ | $CC -E - | $GREP __ELF__ >/dev/null; then
-	archive_cmds='$LD -Bshareable -o $lib $libobjs $deplibs $linker_flags'  # a.out
-      else
-	archive_cmds='$LD -shared -o $lib $libobjs $deplibs $linker_flags'      # ELF
-      fi
-      hardcode_libdir_flag_spec='-R$libdir'
-      hardcode_direct=yes
-      hardcode_shlibpath_var=no
-      ;;
-
-    newsos6)
-      archive_cmds='$LD -G -h $soname -o $lib $libobjs $deplibs $linker_flags'
-      hardcode_direct=yes
-      hardcode_libdir_flag_spec='${wl}-rpath ${wl}$libdir'
-      hardcode_libdir_separator=:
-      hardcode_shlibpath_var=no
-      ;;
-
-    *nto* | *qnx*)
-      ;;
-
-    openbsd*)
-      if test -f /usr/libexec/ld.so; then
-	hardcode_direct=yes
-	hardcode_shlibpath_var=no
-	hardcode_direct_absolute=yes
-	if test -z "`echo __ELF__ | $CC -E - | $GREP __ELF__`" || test "$host_os-$host_cpu" = "openbsd2.8-powerpc"; then
-	  archive_cmds='$CC -shared $pic_flag -o $lib $libobjs $deplibs $compiler_flags'
-	  archive_expsym_cmds='$CC -shared $pic_flag -o $lib $libobjs $deplibs $compiler_flags ${wl}-retain-symbols-file,$export_symbols'
-	  hardcode_libdir_flag_spec='${wl}-rpath,$libdir'
-	  export_dynamic_flag_spec='${wl}-E'
-	else
-	  case $host_os in
-	   openbsd[01].* | openbsd2.[0-7] | openbsd2.[0-7].*)
-	     archive_cmds='$LD -Bshareable -o $lib $libobjs $deplibs $linker_flags'
-	     hardcode_libdir_flag_spec='-R$libdir'
-	     ;;
-	   *)
-	     archive_cmds='$CC -shared $pic_flag -o $lib $libobjs $deplibs $compiler_flags'
-	     hardcode_libdir_flag_spec='${wl}-rpath,$libdir'
-	     ;;
-	  esac
-	fi
-      else
-	ld_shlibs=no
-      fi
-      ;;
-
-    os2*)
-      hardcode_libdir_flag_spec='-L$libdir'
-      hardcode_minus_L=yes
-      allow_undefined_flag=unsupported
-      archive_cmds='$ECHO "LIBRARY $libname INITINSTANCE" > $output_objdir/$libname.def~$ECHO "DESCRIPTION \"$libname\"" >> $output_objdir/$libname.def~echo DATA >> $output_objdir/$libname.def~echo " SINGLE NONSHARED" >> $output_objdir/$libname.def~echo EXPORTS >> $output_objdir/$libname.def~emxexp $libobjs >> $output_objdir/$libname.def~$CC -Zdll -Zcrtdll -o $lib $libobjs $deplibs $compiler_flags $output_objdir/$libname.def'
-      old_archive_from_new_cmds='emximp -o $output_objdir/$libname.a $output_objdir/$libname.def'
-      ;;
-
-    osf3*)
-      if test "$GCC" = yes; then
-	allow_undefined_flag=' ${wl}-expect_unresolved ${wl}\*'
-	archive_cmds='$CC -shared${allow_undefined_flag} $libobjs $deplibs $compiler_flags ${wl}-soname ${wl}$soname `test -n "$verstring" && func_echo_all "${wl}-set_version ${wl}$verstring"` ${wl}-update_registry ${wl}${output_objdir}/so_locations -o $lib'
-      else
-	allow_undefined_flag=' -expect_unresolved \*'
-	archive_cmds='$CC -shared${allow_undefined_flag} $libobjs $deplibs $compiler_flags -soname $soname `test -n "$verstring" && func_echo_all "-set_version $verstring"` -update_registry ${output_objdir}/so_locations -o $lib'
-      fi
-      archive_cmds_need_lc='no'
-      hardcode_libdir_flag_spec='${wl}-rpath ${wl}$libdir'
-      hardcode_libdir_separator=:
-      ;;
-
-    osf4* | osf5*)	# as osf3* with the addition of -msym flag
-      if test "$GCC" = yes; then
-	allow_undefined_flag=' ${wl}-expect_unresolved ${wl}\*'
-	archive_cmds='$CC -shared${allow_undefined_flag} $pic_flag $libobjs $deplibs $compiler_flags ${wl}-msym ${wl}-soname ${wl}$soname `test -n "$verstring" && func_echo_all "${wl}-set_version ${wl}$verstring"` ${wl}-update_registry ${wl}${output_objdir}/so_locations -o $lib'
-	hardcode_libdir_flag_spec='${wl}-rpath ${wl}$libdir'
-      else
-	allow_undefined_flag=' -expect_unresolved \*'
-	archive_cmds='$CC -shared${allow_undefined_flag} $libobjs $deplibs $compiler_flags -msym -soname $soname `test -n "$verstring" && func_echo_all "-set_version $verstring"` -update_registry ${output_objdir}/so_locations -o $lib'
-	archive_expsym_cmds='for i in `cat $export_symbols`; do printf "%s %s\\n" -exported_symbol "\$i" >> $lib.exp; done; printf "%s\\n" "-hidden">> $lib.exp~
-	$CC -shared${allow_undefined_flag} ${wl}-input ${wl}$lib.exp $compiler_flags $libobjs $deplibs -soname $soname `test -n "$verstring" && $ECHO "-set_version $verstring"` -update_registry ${output_objdir}/so_locations -o $lib~$RM $lib.exp'
-
-	# Both the C and C++ compilers support -rpath directly
-	hardcode_libdir_flag_spec='-rpath $libdir'
-      fi
-      archive_cmds_need_lc='no'
-      hardcode_libdir_separator=:
-      ;;
-
-    solaris*)
-      no_undefined_flag=' -z defs'
-      if test "$GCC" = yes; then
-	wlarc='${wl}'
-	archive_cmds='$CC -shared $pic_flag ${wl}-z ${wl}text ${wl}-h ${wl}$soname -o $lib $libobjs $deplibs $compiler_flags'
-	archive_expsym_cmds='echo "{ global:" > $lib.exp~cat $export_symbols | $SED -e "s/\(.*\)/\1;/" >> $lib.exp~echo "local: *; };" >> $lib.exp~
-	  $CC -shared $pic_flag ${wl}-z ${wl}text ${wl}-M ${wl}$lib.exp ${wl}-h ${wl}$soname -o $lib $libobjs $deplibs $compiler_flags~$RM $lib.exp'
-      else
-	case `$CC -V 2>&1` in
-	*"Compilers 5.0"*)
-	  wlarc=''
-	  archive_cmds='$LD -G${allow_undefined_flag} -h $soname -o $lib $libobjs $deplibs $linker_flags'
-	  archive_expsym_cmds='echo "{ global:" > $lib.exp~cat $export_symbols | $SED -e "s/\(.*\)/\1;/" >> $lib.exp~echo "local: *; };" >> $lib.exp~
-	  $LD -G${allow_undefined_flag} -M $lib.exp -h $soname -o $lib $libobjs $deplibs $linker_flags~$RM $lib.exp'
-	  ;;
-	*)
-	  wlarc='${wl}'
-	  archive_cmds='$CC -G${allow_undefined_flag} -h $soname -o $lib $libobjs $deplibs $compiler_flags'
-	  archive_expsym_cmds='echo "{ global:" > $lib.exp~cat $export_symbols | $SED -e "s/\(.*\)/\1;/" >> $lib.exp~echo "local: *; };" >> $lib.exp~
-	  $CC -G${allow_undefined_flag} -M $lib.exp -h $soname -o $lib $libobjs $deplibs $compiler_flags~$RM $lib.exp'
-	  ;;
-	esac
-      fi
-      hardcode_libdir_flag_spec='-R$libdir'
-      hardcode_shlibpath_var=no
-      case $host_os in
-      solaris2.[0-5] | solaris2.[0-5].*) ;;
-      *)
-	# The compiler driver will combine and reorder linker options,
-	# but understands `-z linker_flag'.  GCC discards it without `$wl',
-	# but is careful enough not to reorder.
-	# Supported since Solaris 2.6 (maybe 2.5.1?)
-	if test "$GCC" = yes; then
-	  whole_archive_flag_spec='${wl}-z ${wl}allextract$convenience ${wl}-z ${wl}defaultextract'
-	else
-	  whole_archive_flag_spec='-z allextract$convenience -z defaultextract'
-	fi
-	;;
-      esac
-      link_all_deplibs=yes
-      ;;
-
-    sunos4*)
-      if test "x$host_vendor" = xsequent; then
-	# Use $CC to link under sequent, because it throws in some extra .o
-	# files that make .init and .fini sections work.
-	archive_cmds='$CC -G ${wl}-h $soname -o $lib $libobjs $deplibs $compiler_flags'
-      else
-	archive_cmds='$LD -assert pure-text -Bstatic -o $lib $libobjs $deplibs $linker_flags'
-      fi
-      hardcode_libdir_flag_spec='-L$libdir'
-      hardcode_direct=yes
-      hardcode_minus_L=yes
-      hardcode_shlibpath_var=no
-      ;;
-
-    sysv4)
-      case $host_vendor in
-	sni)
-	  archive_cmds='$LD -G -h $soname -o $lib $libobjs $deplibs $linker_flags'
-	  hardcode_direct=yes # is this really true???
-	;;
-	siemens)
-	  ## LD is ld; it makes a PLAMLIB
-	  ## CC just makes a GrossModule.
-	  archive_cmds='$LD -G -o $lib $libobjs $deplibs $linker_flags'
-	  reload_cmds='$CC -r -o $output$reload_objs'
-	  hardcode_direct=no
-        ;;
-	motorola)
-	  archive_cmds='$LD -G -h $soname -o $lib $libobjs $deplibs $linker_flags'
-	  hardcode_direct=no #Motorola manual says yes, but my tests say they lie
-	;;
-      esac
-      runpath_var='LD_RUN_PATH'
-      hardcode_shlibpath_var=no
-      ;;
-
-    sysv4.3*)
-      archive_cmds='$LD -G -h $soname -o $lib $libobjs $deplibs $linker_flags'
-      hardcode_shlibpath_var=no
-      export_dynamic_flag_spec='-Bexport'
-      ;;
-
-    sysv4*MP*)
-      if test -d /usr/nec; then
-	archive_cmds='$LD -G -h $soname -o $lib $libobjs $deplibs $linker_flags'
-	hardcode_shlibpath_var=no
-	runpath_var=LD_RUN_PATH
-	hardcode_runpath_var=yes
-	ld_shlibs=yes
-      fi
-      ;;
-
-    sysv4*uw2* | sysv5OpenUNIX* | sysv5UnixWare7.[01].[10]* | unixware7* | sco3.2v5.0.[024]*)
-      no_undefined_flag='${wl}-z,text'
-      archive_cmds_need_lc=no
-      hardcode_shlibpath_var=no
-      runpath_var='LD_RUN_PATH'
-
-      if test "$GCC" = yes; then
-	archive_cmds='$CC -shared ${wl}-h,$soname -o $lib $libobjs $deplibs $compiler_flags'
-	archive_expsym_cmds='$CC -shared ${wl}-Bexport:$export_symbols ${wl}-h,$soname -o $lib $libobjs $deplibs $compiler_flags'
-      else
-	archive_cmds='$CC -G ${wl}-h,$soname -o $lib $libobjs $deplibs $compiler_flags'
-	archive_expsym_cmds='$CC -G ${wl}-Bexport:$export_symbols ${wl}-h,$soname -o $lib $libobjs $deplibs $compiler_flags'
-      fi
-      ;;
-
-    sysv5* | sco3.2v5* | sco5v6*)
-      # Note: We can NOT use -z defs as we might desire, because we do not
-      # link with -lc, and that would cause any symbols used from libc to
-      # always be unresolved, which means just about no library would
-      # ever link correctly.  If we're not using GNU ld we use -z text
-      # though, which does catch some bad symbols but isn't as heavy-handed
-      # as -z defs.
-      no_undefined_flag='${wl}-z,text'
-      allow_undefined_flag='${wl}-z,nodefs'
-      archive_cmds_need_lc=no
-      hardcode_shlibpath_var=no
-      hardcode_libdir_flag_spec='${wl}-R,$libdir'
-      hardcode_libdir_separator=':'
-      link_all_deplibs=yes
-      export_dynamic_flag_spec='${wl}-Bexport'
-      runpath_var='LD_RUN_PATH'
-
-      if test "$GCC" = yes; then
-	archive_cmds='$CC -shared ${wl}-h,$soname -o $lib $libobjs $deplibs $compiler_flags'
-	archive_expsym_cmds='$CC -shared ${wl}-Bexport:$export_symbols ${wl}-h,$soname -o $lib $libobjs $deplibs $compiler_flags'
-      else
-	archive_cmds='$CC -G ${wl}-h,$soname -o $lib $libobjs $deplibs $compiler_flags'
-	archive_expsym_cmds='$CC -G ${wl}-Bexport:$export_symbols ${wl}-h,$soname -o $lib $libobjs $deplibs $compiler_flags'
-      fi
-      ;;
-
-    uts4*)
-      archive_cmds='$LD -G -h $soname -o $lib $libobjs $deplibs $linker_flags'
-      hardcode_libdir_flag_spec='-L$libdir'
-      hardcode_shlibpath_var=no
-      ;;
-
-    *)
-      ld_shlibs=no
-      ;;
-    esac
-
-    if test x$host_vendor = xsni; then
-      case $host in
-      sysv4 | sysv4.2uw2* | sysv4.3* | sysv5*)
-	export_dynamic_flag_spec='${wl}-Blargedynsym'
-	;;
-      esac
-    fi
-  fi
-
-{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $ld_shlibs" >&5
-$as_echo "$ld_shlibs" >&6; }
-test "$ld_shlibs" = no && can_build_shared=no
-
-with_gnu_ld=$with_gnu_ld
-
-#
-# Do we need to explicitly link libc?
-#
-case "x$archive_cmds_need_lc" in
-x|xyes)
-  # Assume -lc should be added
-  archive_cmds_need_lc=yes
-
-  if test "$enable_shared" = yes && test "$GCC" = yes; then
-    case $archive_cmds in
-    *'~'*)
-      # FIXME: we may have to deal with multi-command sequences.
-      ;;
-    '$CC '*)
-      # Test whether the compiler implicitly links with -lc since on some
-      # systems, -lgcc has to come before -lc. If gcc already passes -lc
-      # to ld, don't add -lc before -lgcc.
-      { $as_echo "$as_me:${as_lineno-$LINENO}: checking whether -lc should be explicitly linked in" >&5
-$as_echo_n "checking whether -lc should be explicitly linked in... " >&6; }
-if ${lt_cv_archive_cmds_need_lc+:} false; then :
-  $as_echo_n "(cached) " >&6
-else
-  $RM conftest*
-	echo "$lt_simple_compile_test_code" > conftest.$ac_ext
-
-	if { { eval echo "\"\$as_me\":${as_lineno-$LINENO}: \"$ac_compile\""; } >&5
-  (eval $ac_compile) 2>&5
-  ac_status=$?
-  $as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5
-  test $ac_status = 0; } 2>conftest.err; then
-	  soname=conftest
-	  lib=conftest
-	  libobjs=conftest.$ac_objext
-	  deplibs=
-	  wl=$lt_prog_compiler_wl
-	  pic_flag=$lt_prog_compiler_pic
-	  compiler_flags=-v
-	  linker_flags=-v
-	  verstring=
-	  output_objdir=.
-	  libname=conftest
-	  lt_save_allow_undefined_flag=$allow_undefined_flag
-	  allow_undefined_flag=
-	  if { { eval echo "\"\$as_me\":${as_lineno-$LINENO}: \"$archive_cmds 2\>\&1 \| $GREP \" -lc \" \>/dev/null 2\>\&1\""; } >&5
-  (eval $archive_cmds 2\>\&1 \| $GREP \" -lc \" \>/dev/null 2\>\&1) 2>&5
-  ac_status=$?
-  $as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5
-  test $ac_status = 0; }
-	  then
-	    lt_cv_archive_cmds_need_lc=no
-	  else
-	    lt_cv_archive_cmds_need_lc=yes
-	  fi
-	  allow_undefined_flag=$lt_save_allow_undefined_flag
-	else
-	  cat conftest.err 1>&5
-	fi
-	$RM conftest*
-
-fi
-{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $lt_cv_archive_cmds_need_lc" >&5
-$as_echo "$lt_cv_archive_cmds_need_lc" >&6; }
-      archive_cmds_need_lc=$lt_cv_archive_cmds_need_lc
-      ;;
-    esac
-  fi
-  ;;
-esac
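The case above decides whether -lc must be added explicitly by rerunning the archive command verbosely and checking whether the driver already passes -lc to ld (adding it again could put it ahead of -lgcc). Reduced to its essence, for a gcc-like driver:

    # Sketch: does the compiler driver implicitly link -lc when building a shared lib?
    echo 'int main(void) { return 0; }' > conftest.c
    ${CC:-cc} -c conftest.c
    if ${CC:-cc} -shared -v -o conftest.so conftest.o 2>&1 | grep ' -lc ' >/dev/null; then
      archive_cmds_need_lc=no   # -lc is already implicit; do not add it before -lgcc
    else
      archive_cmds_need_lc=yes
    fi
    rm -f conftest*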
-
-  { $as_echo "$as_me:${as_lineno-$LINENO}: checking dynamic linker characteristics" >&5
-$as_echo_n "checking dynamic linker characteristics... " >&6; }
-
-if test "$GCC" = yes; then
-  case $host_os in
-    darwin*) lt_awk_arg="/^libraries:/,/LR/" ;;
-    *) lt_awk_arg="/^libraries:/" ;;
-  esac
-  case $host_os in
-    mingw* | cegcc*) lt_sed_strip_eq="s,=\([A-Za-z]:\),\1,g" ;;
-    *) lt_sed_strip_eq="s,=/,/,g" ;;
-  esac
-  lt_search_path_spec=`$CC -print-search-dirs | awk $lt_awk_arg | $SED -e "s/^libraries://" -e $lt_sed_strip_eq`
-  case $lt_search_path_spec in
-  *\;*)
-    # if the path contains ";" then we assume it to be the separator
-    # otherwise default to the standard path separator (i.e. ":") - it is
-    # assumed that no part of a normal pathname contains ";" but that should
-    # be okay in the real world where ";" in dirpaths is itself problematic.
-    lt_search_path_spec=`$ECHO "$lt_search_path_spec" | $SED 's/;/ /g'`
-    ;;
-  *)
-    lt_search_path_spec=`$ECHO "$lt_search_path_spec" | $SED "s/$PATH_SEPARATOR/ /g"`
-    ;;
-  esac
-  # Ok, now we have the path, separated by spaces, we can step through it
-  # and add multilib dir if necessary.
-  lt_tmp_lt_search_path_spec=
-  lt_multi_os_dir=`$CC $CPPFLAGS $CFLAGS $LDFLAGS -print-multi-os-directory 2>/dev/null`
-  for lt_sys_path in $lt_search_path_spec; do
-    if test -d "$lt_sys_path/$lt_multi_os_dir"; then
-      lt_tmp_lt_search_path_spec="$lt_tmp_lt_search_path_spec $lt_sys_path/$lt_multi_os_dir"
-    else
-      test -d "$lt_sys_path" && \
-	lt_tmp_lt_search_path_spec="$lt_tmp_lt_search_path_spec $lt_sys_path"
-    fi
-  done
-  lt_search_path_spec=`$ECHO "$lt_tmp_lt_search_path_spec" | awk '
-BEGIN {RS=" "; FS="/|\n";} {
-  lt_foo="";
-  lt_count=0;
-  for (lt_i = NF; lt_i > 0; lt_i--) {
-    if ($lt_i != "" && $lt_i != ".") {
-      if ($lt_i == "..") {
-        lt_count++;
-      } else {
-        if (lt_count == 0) {
-          lt_foo="/" $lt_i lt_foo;
-        } else {
-          lt_count--;
-        }
-      }
-    }
-  }
-  if (lt_foo != "") { lt_freq[lt_foo]++; }
-  if (lt_freq[lt_foo] == 1) { print lt_foo; }
-}'`
-  # AWK program above erroneously prepends '/' to C:/dos/paths
-  # for these hosts.
-  case $host_os in
-    mingw* | cegcc*) lt_search_path_spec=`$ECHO "$lt_search_path_spec" |\
-      $SED 's,/\([A-Za-z]:\),\1,g'` ;;
-  esac
-  sys_lib_search_path_spec=`$ECHO "$lt_search_path_spec" | $lt_NL2SP`
-else
-  sys_lib_search_path_spec="/lib /usr/lib /usr/local/lib"
-fi
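The path discovery above reduces to parsing `$CC -print-search-dirs`; the awk program that follows it only normalizes and deduplicates the result. A condensed equivalent for a GNU toolchain, ignoring the multilib and Windows special cases:

    # Sketch: list gcc's library search directories, one per line.
    $CC -print-search-dirs | sed -n 's/^libraries: *=*//p' | tr ':' '\n'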
-library_names_spec=
-libname_spec='lib$name'
-soname_spec=
-shrext_cmds=".so"
-postinstall_cmds=
-postuninstall_cmds=
-finish_cmds=
-finish_eval=
-shlibpath_var=
-shlibpath_overrides_runpath=unknown
-version_type=none
-dynamic_linker="$host_os ld.so"
-sys_lib_dlsearch_path_spec="/lib /usr/lib"
-need_lib_prefix=unknown
-hardcode_into_libs=no
-
-# when you set need_version to no, make sure it does not cause -set_version
-# flags to be left without arguments
-need_version=unknown
-
-case $host_os in
-aix3*)
-  version_type=linux # correct to gnu/linux during the next big refactor
-  library_names_spec='${libname}${release}${shared_ext}$versuffix $libname.a'
-  shlibpath_var=LIBPATH
-
-  # AIX 3 has no versioning support, so we append a major version to the name.
-  soname_spec='${libname}${release}${shared_ext}$major'
-  ;;
-
-aix[4-9]*)
-  version_type=linux # correct to gnu/linux during the next big refactor
-  need_lib_prefix=no
-  need_version=no
-  hardcode_into_libs=yes
-  if test "$host_cpu" = ia64; then
-    # AIX 5 supports IA64
-    library_names_spec='${libname}${release}${shared_ext}$major ${libname}${release}${shared_ext}$versuffix $libname${shared_ext}'
-    shlibpath_var=LD_LIBRARY_PATH
-  else
-    # With GCC up to 2.95.x, collect2 would create an import file
-    # for dependent libraries.  The import file would start with
-    # the line `#! .'.  This would cause the generated library to
-    # depend on `.', always an invalid library.  This was fixed in
-    # development snapshots of GCC prior to 3.0.
-    case $host_os in
-      aix4 | aix4.[01] | aix4.[01].*)
-      if { echo '#if __GNUC__ > 2 || (__GNUC__ == 2 && __GNUC_MINOR__ >= 97)'
-	   echo ' yes '
-	   echo '#endif'; } | ${CC} -E - | $GREP yes > /dev/null; then
-	:
-      else
-	can_build_shared=no
-      fi
-      ;;
-    esac
-    # AIX (on Power*) has no versioning support, so currently we cannot hardcode a correct
-    # soname into the executable. We could add versioning support to
-    # collect2, so additional links could be useful in the future.
-    if test "$aix_use_runtimelinking" = yes; then
-      # If using run time linking (on AIX 4.2 or later) use lib<name>.so
-      # instead of lib<name>.a to let people know that these are not
-      # typical AIX shared libraries.
-      library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext}$major $libname${shared_ext}'
-    else
-      # We preserve .a as extension for shared libraries through AIX4.2
-      # and later when we are not doing run time linking.
-      library_names_spec='${libname}${release}.a $libname.a'
-      soname_spec='${libname}${release}${shared_ext}$major'
-    fi
-    shlibpath_var=LIBPATH
-  fi
-  ;;
-
-amigaos*)
-  case $host_cpu in
-  powerpc)
-    # Since July 2007 AmigaOS4 officially supports .so libraries.
-    # When compiling the executable, add -use-dynld -Lsobjs: to the compileline.
-    library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext}$major $libname${shared_ext}'
-    ;;
-  m68k)
-    library_names_spec='$libname.ixlibrary $libname.a'
-    # Create ${libname}_ixlibrary.a entries in /sys/libs.
-    finish_eval='for lib in `ls $libdir/*.ixlibrary 2>/dev/null`; do libname=`func_echo_all "$lib" | $SED '\''s%^.*/\([^/]*\)\.ixlibrary$%\1%'\''`; test $RM /sys/libs/${libname}_ixlibrary.a; $show "cd /sys/libs && $LN_S $lib ${libname}_ixlibrary.a"; cd /sys/libs && $LN_S $lib ${libname}_ixlibrary.a || exit 1; done'
-    ;;
-  esac
-  ;;
-
-beos*)
-  library_names_spec='${libname}${shared_ext}'
-  dynamic_linker="$host_os ld.so"
-  shlibpath_var=LIBRARY_PATH
-  ;;
-
-bsdi[45]*)
-  version_type=linux # correct to gnu/linux during the next big refactor
-  need_version=no
-  library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext}$major $libname${shared_ext}'
-  soname_spec='${libname}${release}${shared_ext}$major'
-  finish_cmds='PATH="\$PATH:/sbin" ldconfig $libdir'
-  shlibpath_var=LD_LIBRARY_PATH
-  sys_lib_search_path_spec="/shlib /usr/lib /usr/X11/lib /usr/contrib/lib /lib /usr/local/lib"
-  sys_lib_dlsearch_path_spec="/shlib /usr/lib /usr/local/lib"
-  # the default ld.so.conf also contains /usr/contrib/lib and
-  # /usr/X11R6/lib (/usr/X11 is a link to /usr/X11R6), but let us allow
-  # libtool to hard-code these into programs
-  ;;
-
-cygwin* | mingw* | pw32* | cegcc*)
-  version_type=windows
-  shrext_cmds=".dll"
-  need_version=no
-  need_lib_prefix=no
-
-  case $GCC,$cc_basename in
-  yes,*)
-    # gcc
-    library_names_spec='$libname.dll.a'
-    # DLL is installed to $(libdir)/../bin by postinstall_cmds
-    postinstall_cmds='base_file=`basename \${file}`~
-      dlpath=`$SHELL 2>&1 -c '\''. $dir/'\''\${base_file}'\''i; echo \$dlname'\''`~
-      dldir=$destdir/`dirname \$dlpath`~
-      test -d \$dldir || mkdir -p \$dldir~
-      $install_prog $dir/$dlname \$dldir/$dlname~
-      chmod a+x \$dldir/$dlname~
-      if test -n '\''$stripme'\'' && test -n '\''$striplib'\''; then
-        eval '\''$striplib \$dldir/$dlname'\'' || exit \$?;
-      fi'
-    postuninstall_cmds='dldll=`$SHELL 2>&1 -c '\''. $file; echo \$dlname'\''`~
-      dlpath=$dir/\$dldll~
-       $RM \$dlpath'
-    shlibpath_overrides_runpath=yes
-
-    case $host_os in
-    cygwin*)
-      # Cygwin DLLs use 'cyg' prefix rather than 'lib'
-      soname_spec='`echo ${libname} | sed -e 's/^lib/cyg/'``echo ${release} | $SED -e 's/[.]/-/g'`${versuffix}${shared_ext}'
-
-      sys_lib_search_path_spec="$sys_lib_search_path_spec /usr/lib/w32api"
-      ;;
-    mingw* | cegcc*)
-      # MinGW DLLs use traditional 'lib' prefix
-      soname_spec='${libname}`echo ${release} | $SED -e 's/[.]/-/g'`${versuffix}${shared_ext}'
-      ;;
-    pw32*)
-      # pw32 DLLs use 'pw' prefix rather than 'lib'
-      library_names_spec='`echo ${libname} | sed -e 's/^lib/pw/'``echo ${release} | $SED -e 's/[.]/-/g'`${versuffix}${shared_ext}'
-      ;;
-    esac
-    dynamic_linker='Win32 ld.exe'
-    ;;
-
-  *,cl*)
-    # Native MSVC
-    libname_spec='$name'
-    soname_spec='${libname}`echo ${release} | $SED -e 's/[.]/-/g'`${versuffix}${shared_ext}'
-    library_names_spec='${libname}.dll.lib'
-
-    case $build_os in
-    mingw*)
-      sys_lib_search_path_spec=
-      lt_save_ifs=$IFS
-      IFS=';'
-      for lt_path in $LIB
-      do
-        IFS=$lt_save_ifs
-        # Let DOS variable expansion print the short 8.3 style file name.
-        lt_path=`cd "$lt_path" 2>/dev/null && cmd //C "for %i in (".") do @echo %~si"`
-        sys_lib_search_path_spec="$sys_lib_search_path_spec $lt_path"
-      done
-      IFS=$lt_save_ifs
-      # Convert to MSYS style.
-      sys_lib_search_path_spec=`$ECHO "$sys_lib_search_path_spec" | sed -e 's|\\\\|/|g' -e 's| \\([a-zA-Z]\\):| /\\1|g' -e 's|^ ||'`
-      ;;
-    cygwin*)
-      # Convert to unix form, then to dos form, then back to unix form
-      # but this time dos style (no spaces!) so that the unix form looks
-      # like /cygdrive/c/PROGRA~1:/cygdr...
-      sys_lib_search_path_spec=`cygpath --path --unix "$LIB"`
-      sys_lib_search_path_spec=`cygpath --path --dos "$sys_lib_search_path_spec" 2>/dev/null`
-      sys_lib_search_path_spec=`cygpath --path --unix "$sys_lib_search_path_spec" | $SED -e "s/$PATH_SEPARATOR/ /g"`
-      ;;
-    *)
-      sys_lib_search_path_spec="$LIB"
-      if $ECHO "$sys_lib_search_path_spec" | $GREP ';[c-zC-Z]:/' >/dev/null; then
-        # It is most probably a Windows format PATH.
-        sys_lib_search_path_spec=`$ECHO "$sys_lib_search_path_spec" | $SED -e 's/;/ /g'`
-      else
-        sys_lib_search_path_spec=`$ECHO "$sys_lib_search_path_spec" | $SED -e "s/$PATH_SEPARATOR/ /g"`
-      fi
-      # FIXME: find the short name or the path components, as spaces are
-      # common. (e.g. "Program Files" -> "PROGRA~1")
-      ;;
-    esac
-
-    # DLL is installed to $(libdir)/../bin by postinstall_cmds
-    postinstall_cmds='base_file=`basename \${file}`~
-      dlpath=`$SHELL 2>&1 -c '\''. $dir/'\''\${base_file}'\''i; echo \$dlname'\''`~
-      dldir=$destdir/`dirname \$dlpath`~
-      test -d \$dldir || mkdir -p \$dldir~
-      $install_prog $dir/$dlname \$dldir/$dlname'
-    postuninstall_cmds='dldll=`$SHELL 2>&1 -c '\''. $file; echo \$dlname'\''`~
-      dlpath=$dir/\$dldll~
-       $RM \$dlpath'
-    shlibpath_overrides_runpath=yes
-    dynamic_linker='Win32 link.exe'
-    ;;
-
-  *)
-    # Assume MSVC wrapper
-    library_names_spec='${libname}`echo ${release} | $SED -e 's/[.]/-/g'`${versuffix}${shared_ext} $libname.lib'
-    dynamic_linker='Win32 ld.exe'
-    ;;
-  esac
-  # FIXME: first we should search . and the directory the executable is in
-  shlibpath_var=PATH
-  ;;
-
-darwin* | rhapsody*)
-  dynamic_linker="$host_os dyld"
-  version_type=darwin
-  need_lib_prefix=no
-  need_version=no
-  library_names_spec='${libname}${release}${major}$shared_ext ${libname}$shared_ext'
-  soname_spec='${libname}${release}${major}$shared_ext'
-  shlibpath_overrides_runpath=yes
-  shlibpath_var=DYLD_LIBRARY_PATH
-  shrext_cmds='`test .$module = .yes && echo .so || echo .dylib`'
-
-  sys_lib_search_path_spec="$sys_lib_search_path_spec /usr/local/lib"
-  sys_lib_dlsearch_path_spec='/usr/local/lib /lib /usr/lib'
-  ;;
-
-dgux*)
-  version_type=linux # correct to gnu/linux during the next big refactor
-  need_lib_prefix=no
-  need_version=no
-  library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext}$major $libname$shared_ext'
-  soname_spec='${libname}${release}${shared_ext}$major'
-  shlibpath_var=LD_LIBRARY_PATH
-  ;;
-
-freebsd* | dragonfly*)
-  # DragonFly does not have aout.  When/if they implement a new
-  # versioning mechanism, adjust this.
-  if test -x /usr/bin/objformat; then
-    objformat=`/usr/bin/objformat`
-  else
-    case $host_os in
-    freebsd[23].*) objformat=aout ;;
-    *) objformat=elf ;;
-    esac
-  fi
-  version_type=freebsd-$objformat
-  case $version_type in
-    freebsd-elf*)
-      library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext} $libname${shared_ext}'
-      need_version=no
-      need_lib_prefix=no
-      ;;
-    freebsd-*)
-      library_names_spec='${libname}${release}${shared_ext}$versuffix $libname${shared_ext}$versuffix'
-      need_version=yes
-      ;;
-  esac
-  shlibpath_var=LD_LIBRARY_PATH
-  case $host_os in
-  freebsd2.*)
-    shlibpath_overrides_runpath=yes
-    ;;
-  freebsd3.[01]* | freebsdelf3.[01]*)
-    shlibpath_overrides_runpath=yes
-    hardcode_into_libs=yes
-    ;;
-  freebsd3.[2-9]* | freebsdelf3.[2-9]* | \
-  freebsd4.[0-5] | freebsdelf4.[0-5] | freebsd4.1.1 | freebsdelf4.1.1)
-    shlibpath_overrides_runpath=no
-    hardcode_into_libs=yes
-    ;;
-  *) # from 4.6 on, and DragonFly
-    shlibpath_overrides_runpath=yes
-    hardcode_into_libs=yes
-    ;;
-  esac
-  ;;
-
-gnu*)
-  version_type=linux # correct to gnu/linux during the next big refactor
-  need_lib_prefix=no
-  need_version=no
-  library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext}${major} ${libname}${shared_ext}'
-  soname_spec='${libname}${release}${shared_ext}$major'
-  shlibpath_var=LD_LIBRARY_PATH
-  shlibpath_overrides_runpath=no
-  hardcode_into_libs=yes
-  ;;
-
-haiku*)
-  version_type=linux # correct to gnu/linux during the next big refactor
-  need_lib_prefix=no
-  need_version=no
-  dynamic_linker="$host_os runtime_loader"
-  library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext}${major} ${libname}${shared_ext}'
-  soname_spec='${libname}${release}${shared_ext}$major'
-  shlibpath_var=LIBRARY_PATH
-  shlibpath_overrides_runpath=yes
-  sys_lib_dlsearch_path_spec='/boot/home/config/lib /boot/common/lib /boot/system/lib'
-  hardcode_into_libs=yes
-  ;;
-
-hpux9* | hpux10* | hpux11*)
-  # Give a soname corresponding to the major version so that dld.sl refuses to
-  # link against other versions.
-  version_type=sunos
-  need_lib_prefix=no
-  need_version=no
-  case $host_cpu in
-  ia64*)
-    shrext_cmds='.so'
-    hardcode_into_libs=yes
-    dynamic_linker="$host_os dld.so"
-    shlibpath_var=LD_LIBRARY_PATH
-    shlibpath_overrides_runpath=yes # Unless +noenvvar is specified.
-    library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext}$major $libname${shared_ext}'
-    soname_spec='${libname}${release}${shared_ext}$major'
-    if test "X$HPUX_IA64_MODE" = X32; then
-      sys_lib_search_path_spec="/usr/lib/hpux32 /usr/local/lib/hpux32 /usr/local/lib"
-    else
-      sys_lib_search_path_spec="/usr/lib/hpux64 /usr/local/lib/hpux64"
-    fi
-    sys_lib_dlsearch_path_spec=$sys_lib_search_path_spec
-    ;;
-  hppa*64*)
-    shrext_cmds='.sl'
-    hardcode_into_libs=yes
-    dynamic_linker="$host_os dld.sl"
-    shlibpath_var=LD_LIBRARY_PATH # How should we handle SHLIB_PATH?
-    shlibpath_overrides_runpath=yes # Unless +noenvvar is specified.
-    library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext}$major $libname${shared_ext}'
-    soname_spec='${libname}${release}${shared_ext}$major'
-    sys_lib_search_path_spec="/usr/lib/pa20_64 /usr/ccs/lib/pa20_64"
-    sys_lib_dlsearch_path_spec=$sys_lib_search_path_spec
-    ;;
-  *)
-    shrext_cmds='.sl'
-    dynamic_linker="$host_os dld.sl"
-    shlibpath_var=SHLIB_PATH
-    shlibpath_overrides_runpath=no # +s is required to enable SHLIB_PATH
-    library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext}$major $libname${shared_ext}'
-    soname_spec='${libname}${release}${shared_ext}$major'
-    ;;
-  esac
-  # HP-UX runs *really* slowly unless shared libraries are mode 555, ...
-  postinstall_cmds='chmod 555 $lib'
-  # or fails outright, so override atomically:
-  install_override_mode=555
-  ;;
-
-interix[3-9]*)
-  version_type=linux # correct to gnu/linux during the next big refactor
-  need_lib_prefix=no
-  need_version=no
-  library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext}$major ${libname}${shared_ext}'
-  soname_spec='${libname}${release}${shared_ext}$major'
-  dynamic_linker='Interix 3.x ld.so.1 (PE, like ELF)'
-  shlibpath_var=LD_LIBRARY_PATH
-  shlibpath_overrides_runpath=no
-  hardcode_into_libs=yes
-  ;;
-
-irix5* | irix6* | nonstopux*)
-  case $host_os in
-    nonstopux*) version_type=nonstopux ;;
-    *)
-	if test "$lt_cv_prog_gnu_ld" = yes; then
-		version_type=linux # correct to gnu/linux during the next big refactor
-	else
-		version_type=irix
-	fi ;;
-  esac
-  need_lib_prefix=no
-  need_version=no
-  soname_spec='${libname}${release}${shared_ext}$major'
-  library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext}$major ${libname}${release}${shared_ext} $libname${shared_ext}'
-  case $host_os in
-  irix5* | nonstopux*)
-    libsuff= shlibsuff=
-    ;;
-  *)
-    case $LD in # libtool.m4 will add one of these switches to LD
-    *-32|*"-32 "|*-melf32bsmip|*"-melf32bsmip ")
-      libsuff= shlibsuff= libmagic=32-bit;;
-    *-n32|*"-n32 "|*-melf32bmipn32|*"-melf32bmipn32 ")
-      libsuff=32 shlibsuff=N32 libmagic=N32;;
-    *-64|*"-64 "|*-melf64bmip|*"-melf64bmip ")
-      libsuff=64 shlibsuff=64 libmagic=64-bit;;
-    *) libsuff= shlibsuff= libmagic=never-match;;
-    esac
-    ;;
-  esac
-  shlibpath_var=LD_LIBRARY${shlibsuff}_PATH
-  shlibpath_overrides_runpath=no
-  sys_lib_search_path_spec="/usr/lib${libsuff} /lib${libsuff} /usr/local/lib${libsuff}"
-  sys_lib_dlsearch_path_spec="/usr/lib${libsuff} /lib${libsuff}"
-  hardcode_into_libs=yes
-  ;;
-
-# No shared lib support for Linux oldld, aout, or coff.
-linux*oldld* | linux*aout* | linux*coff*)
-  dynamic_linker=no
-  ;;
-
-# This must be glibc/ELF.
-linux* | k*bsd*-gnu | kopensolaris*-gnu)
-  version_type=linux # correct to gnu/linux during the next big refactor
-  need_lib_prefix=no
-  need_version=no
-  library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext}$major $libname${shared_ext}'
-  soname_spec='${libname}${release}${shared_ext}$major'
-  finish_cmds='PATH="\$PATH:/sbin" ldconfig -n $libdir'
-  shlibpath_var=LD_LIBRARY_PATH
-  shlibpath_overrides_runpath=no
-
-  # Some binutils ld are patched to set DT_RUNPATH
-  if ${lt_cv_shlibpath_overrides_runpath+:} false; then :
-  $as_echo_n "(cached) " >&6
-else
-  lt_cv_shlibpath_overrides_runpath=no
-    save_LDFLAGS=$LDFLAGS
-    save_libdir=$libdir
-    eval "libdir=/foo; wl=\"$lt_prog_compiler_wl\"; \
-	 LDFLAGS=\"\$LDFLAGS $hardcode_libdir_flag_spec\""
-    cat confdefs.h - <<_ACEOF >conftest.$ac_ext
-/* end confdefs.h.  */
-
-int
-main ()
-{
-
-  ;
-  return 0;
-}
-_ACEOF
-if ac_fn_c_try_link "$LINENO"; then :
-  if  ($OBJDUMP -p conftest$ac_exeext) 2>/dev/null | grep "RUNPATH.*$libdir" >/dev/null; then :
-  lt_cv_shlibpath_overrides_runpath=yes
-fi
-fi
-rm -f core conftest.err conftest.$ac_objext \
-    conftest$ac_exeext conftest.$ac_ext
-    LDFLAGS=$save_LDFLAGS
-    libdir=$save_libdir
-
-fi
-
-  shlibpath_overrides_runpath=$lt_cv_shlibpath_overrides_runpath
-
-  # This implies no fast_install, which is unacceptable.
-  # Some rework will be needed to allow for fast_install
-  # before this can be enabled.
-  hardcode_into_libs=yes
-
-  # Append ld.so.conf contents to the search path
-  if test -f /etc/ld.so.conf; then
-    lt_ld_extra=`awk '/^include / { system(sprintf("cd /etc; cat %s 2>/dev/null", \$2)); skip = 1; } { if (!skip) print \$0; skip = 0; }' < /etc/ld.so.conf | $SED -e 's/#.*//;/^[	 ]*hwcap[	 ]/d;s/[:,	]/ /g;s/=[^=]*$//;s/=[^= ]* / /g;s/"//g;/^$/d' | tr '\n' ' '`
-    sys_lib_dlsearch_path_spec="/lib /usr/lib $lt_ld_extra"
-  fi
-
-  # We used to test for /lib/ld.so.1 and disable shared libraries on
-  # powerpc, because MkLinux only supported shared libraries with the
-  # GNU dynamic linker.  Since this was broken with cross compilers,
-  # most powerpc-linux boxes support dynamic linking these days and
-  # people can always --disable-shared, the test was removed, and we
-  # assume the GNU/Linux dynamic linker is in use.
-  dynamic_linker='GNU/Linux ld.so'
-  ;;
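The cached test above distinguishes patched binutils that emit DT_RUNPATH from those that emit only DT_RPATH. By hand, the same check looks roughly like this (assuming GNU objdump is available):

    # Sketch: does this toolchain record DT_RUNPATH for an rpath'd link?
    echo 'int main(void) { return 0; }' > conftest.c
    ${CC:-cc} -Wl,-rpath,/foo -o conftest conftest.c
    objdump -p conftest | grep RUNPATH   # a match means new-style DT_RUNPATH
    rm -f conftest conftest.c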
-
-netbsd*)
-  version_type=sunos
-  need_lib_prefix=no
-  need_version=no
-  if echo __ELF__ | $CC -E - | $GREP __ELF__ >/dev/null; then
-    library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${shared_ext}$versuffix'
-    finish_cmds='PATH="\$PATH:/sbin" ldconfig -m $libdir'
-    dynamic_linker='NetBSD (a.out) ld.so'
-  else
-    library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext}$major ${libname}${shared_ext}'
-    soname_spec='${libname}${release}${shared_ext}$major'
-    dynamic_linker='NetBSD ld.elf_so'
-  fi
-  shlibpath_var=LD_LIBRARY_PATH
-  shlibpath_overrides_runpath=yes
-  hardcode_into_libs=yes
-  ;;
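The a.out-versus-ELF branch here (and in the openbsd* case below) simply asks the preprocessor whether __ELF__ is predefined:

    # Sketch: detect an ELF toolchain via the predefined __ELF__ macro.
    if echo __ELF__ | ${CC:-cc} -E - | grep __ELF__ >/dev/null; then
      echo ELF
    else
      echo a.out
    fi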
-
-newsos6)
-  version_type=linux # correct to gnu/linux during the next big refactor
-  library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext}$major $libname${shared_ext}'
-  shlibpath_var=LD_LIBRARY_PATH
-  shlibpath_overrides_runpath=yes
-  ;;
-
-*nto* | *qnx*)
-  version_type=qnx
-  need_lib_prefix=no
-  need_version=no
-  library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext}$major $libname${shared_ext}'
-  soname_spec='${libname}${release}${shared_ext}$major'
-  shlibpath_var=LD_LIBRARY_PATH
-  shlibpath_overrides_runpath=no
-  hardcode_into_libs=yes
-  dynamic_linker='ldqnx.so'
-  ;;
-
-openbsd*)
-  version_type=sunos
-  sys_lib_dlsearch_path_spec="/usr/lib"
-  need_lib_prefix=no
-  # Some older versions of OpenBSD (3.3 at least) *do* need versioned libs.
-  case $host_os in
-    openbsd3.3 | openbsd3.3.*)	need_version=yes ;;
-    *)				need_version=no  ;;
-  esac
-  library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${shared_ext}$versuffix'
-  finish_cmds='PATH="\$PATH:/sbin" ldconfig -m $libdir'
-  shlibpath_var=LD_LIBRARY_PATH
-  if test -z "`echo __ELF__ | $CC -E - | $GREP __ELF__`" || test "$host_os-$host_cpu" = "openbsd2.8-powerpc"; then
-    case $host_os in
-      openbsd2.[89] | openbsd2.[89].*)
-	shlibpath_overrides_runpath=no
-	;;
-      *)
-	shlibpath_overrides_runpath=yes
-	;;
-      esac
-  else
-    shlibpath_overrides_runpath=yes
-  fi
-  ;;
-
-os2*)
-  libname_spec='$name'
-  shrext_cmds=".dll"
-  need_lib_prefix=no
-  library_names_spec='$libname${shared_ext} $libname.a'
-  dynamic_linker='OS/2 ld.exe'
-  shlibpath_var=LIBPATH
-  ;;
-
-osf3* | osf4* | osf5*)
-  version_type=osf
-  need_lib_prefix=no
-  need_version=no
-  soname_spec='${libname}${release}${shared_ext}$major'
-  library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext}$major $libname${shared_ext}'
-  shlibpath_var=LD_LIBRARY_PATH
-  sys_lib_search_path_spec="/usr/shlib /usr/ccs/lib /usr/lib/cmplrs/cc /usr/lib /usr/local/lib /var/shlib"
-  sys_lib_dlsearch_path_spec="$sys_lib_search_path_spec"
-  ;;
-
-rdos*)
-  dynamic_linker=no
-  ;;
-
-solaris*)
-  version_type=linux # correct to gnu/linux during the next big refactor
-  need_lib_prefix=no
-  need_version=no
-  library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext}$major $libname${shared_ext}'
-  soname_spec='${libname}${release}${shared_ext}$major'
-  shlibpath_var=LD_LIBRARY_PATH
-  shlibpath_overrides_runpath=yes
-  hardcode_into_libs=yes
-  # ldd complains unless libraries are executable
-  postinstall_cmds='chmod +x $lib'
-  ;;
-
-sunos4*)
-  version_type=sunos
-  library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${shared_ext}$versuffix'
-  finish_cmds='PATH="\$PATH:/usr/etc" ldconfig $libdir'
-  shlibpath_var=LD_LIBRARY_PATH
-  shlibpath_overrides_runpath=yes
-  if test "$with_gnu_ld" = yes; then
-    need_lib_prefix=no
-  fi
-  need_version=yes
-  ;;
-
-sysv4 | sysv4.3*)
-  version_type=linux # correct to gnu/linux during the next big refactor
-  library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext}$major $libname${shared_ext}'
-  soname_spec='${libname}${release}${shared_ext}$major'
-  shlibpath_var=LD_LIBRARY_PATH
-  case $host_vendor in
-    sni)
-      shlibpath_overrides_runpath=no
-      need_lib_prefix=no
-      runpath_var=LD_RUN_PATH
-      ;;
-    siemens)
-      need_lib_prefix=no
-      ;;
-    motorola)
-      need_lib_prefix=no
-      need_version=no
-      shlibpath_overrides_runpath=no
-      sys_lib_search_path_spec='/lib /usr/lib /usr/ccs/lib'
-      ;;
-  esac
-  ;;
-
-sysv4*MP*)
-  if test -d /usr/nec ;then
-    version_type=linux # correct to gnu/linux during the next big refactor
-    library_names_spec='$libname${shared_ext}.$versuffix $libname${shared_ext}.$major $libname${shared_ext}'
-    soname_spec='$libname${shared_ext}.$major'
-    shlibpath_var=LD_LIBRARY_PATH
-  fi
-  ;;
-
-sysv5* | sco3.2v5* | sco5v6* | unixware* | OpenUNIX* | sysv4*uw2*)
-  version_type=freebsd-elf
-  need_lib_prefix=no
-  need_version=no
-  library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext} $libname${shared_ext}'
-  soname_spec='${libname}${release}${shared_ext}$major'
-  shlibpath_var=LD_LIBRARY_PATH
-  shlibpath_overrides_runpath=yes
-  hardcode_into_libs=yes
-  if test "$with_gnu_ld" = yes; then
-    sys_lib_search_path_spec='/usr/local/lib /usr/gnu/lib /usr/ccs/lib /usr/lib /lib'
-  else
-    sys_lib_search_path_spec='/usr/ccs/lib /usr/lib'
-    case $host_os in
-      sco3.2v5*)
-        sys_lib_search_path_spec="$sys_lib_search_path_spec /lib"
-	;;
-    esac
-  fi
-  sys_lib_dlsearch_path_spec='/usr/lib'
-  ;;
-
-tpf*)
-  # TPF is a cross-target only.  Preferred cross-host = GNU/Linux.
-  version_type=linux # correct to gnu/linux during the next big refactor
-  need_lib_prefix=no
-  need_version=no
-  library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext}$major $libname${shared_ext}'
-  shlibpath_var=LD_LIBRARY_PATH
-  shlibpath_overrides_runpath=no
-  hardcode_into_libs=yes
-  ;;
-
-uts4*)
-  version_type=linux # correct to gnu/linux during the next big refactor
-  library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext}$major $libname${shared_ext}'
-  soname_spec='${libname}${release}${shared_ext}$major'
-  shlibpath_var=LD_LIBRARY_PATH
-  ;;
-
-*)
-  dynamic_linker=no
-  ;;
-esac
-{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $dynamic_linker" >&5
-$as_echo "$dynamic_linker" >&6; }
-test "$dynamic_linker" = no && can_build_shared=no
-
-variables_saved_for_relink="PATH $shlibpath_var $runpath_var"
-if test "$GCC" = yes; then
-  variables_saved_for_relink="$variables_saved_for_relink GCC_EXEC_PREFIX COMPILER_PATH LIBRARY_PATH"
-fi
-
-if test "${lt_cv_sys_lib_search_path_spec+set}" = set; then
-  sys_lib_search_path_spec="$lt_cv_sys_lib_search_path_spec"
-fi
-if test "${lt_cv_sys_lib_dlsearch_path_spec+set}" = set; then
-  sys_lib_dlsearch_path_spec="$lt_cv_sys_lib_dlsearch_path_spec"
-fi
-
-  { $as_echo "$as_me:${as_lineno-$LINENO}: checking how to hardcode library paths into programs" >&5
-$as_echo_n "checking how to hardcode library paths into programs... " >&6; }
-hardcode_action=
-if test -n "$hardcode_libdir_flag_spec" ||
-   test -n "$runpath_var" ||
-   test "X$hardcode_automatic" = "Xyes" ; then
-
-  # We can hardcode non-existent directories.
-  if test "$hardcode_direct" != no &&
-     # If the only mechanism to avoid hardcoding is shlibpath_var, we
-     # have to relink, otherwise we might link with an installed library
-     # when we should be linking with a yet-to-be-installed one
-     ## test "$_LT_TAGVAR(hardcode_shlibpath_var, )" != no &&
-     test "$hardcode_minus_L" != no; then
-    # Linking always hardcodes the temporary library directory.
-    hardcode_action=relink
-  else
-    # We can link without hardcoding, and we can hardcode non-existent dirs.
-    hardcode_action=immediate
-  fi
-else
-  # We cannot hardcode anything, or else we can only hardcode existing
-  # directories.
-  hardcode_action=unsupported
-fi
-{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $hardcode_action" >&5
-$as_echo "$hardcode_action" >&6; }
-
-if test "$hardcode_action" = relink ||
-   test "$inherit_rpath" = yes; then
-  # Fast installation is not supported
-  enable_fast_install=no
-elif test "$shlibpath_overrides_runpath" = yes ||
-     test "$enable_shared" = no; then
-  # Fast installation is not necessary
-  enable_fast_install=needless
-fi
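The decision above reduces to three outcomes: relink (build-tree directories get baked into the binary, so installation must relink), immediate (link once, in place), or unsupported. Restated compactly, purely for readability (same logic as the script; the concatenation test is equivalent to the two -n tests):

    if test -n "$hardcode_libdir_flag_spec$runpath_var" || test "X$hardcode_automatic" = Xyes; then
      if test "$hardcode_direct" != no && test "$hardcode_minus_L" != no; then
        hardcode_action=relink      # temporary build dirs are hardcoded; relink at install
      else
        hardcode_action=immediate   # nonexistent dirs can be hardcoded; link once
      fi
    else
      hardcode_action=unsupported
    fi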
-
-
-
-
-
-
-  if test "x$enable_dlopen" != xyes; then
-  enable_dlopen=unknown
-  enable_dlopen_self=unknown
-  enable_dlopen_self_static=unknown
-else
-  lt_cv_dlopen=no
-  lt_cv_dlopen_libs=
-
-  case $host_os in
-  beos*)
-    lt_cv_dlopen="load_add_on"
-    lt_cv_dlopen_libs=
-    lt_cv_dlopen_self=yes
-    ;;
-
-  mingw* | pw32* | cegcc*)
-    lt_cv_dlopen="LoadLibrary"
-    lt_cv_dlopen_libs=
-    ;;
-
-  cygwin*)
-    lt_cv_dlopen="dlopen"
-    lt_cv_dlopen_libs=
-    ;;
-
-  darwin*)
-  # if libdl is installed we need to link against it
-    { $as_echo "$as_me:${as_lineno-$LINENO}: checking for dlopen in -ldl" >&5
-$as_echo_n "checking for dlopen in -ldl... " >&6; }
-if ${ac_cv_lib_dl_dlopen+:} false; then :
-  $as_echo_n "(cached) " >&6
-else
-  ac_check_lib_save_LIBS=$LIBS
-LIBS="-ldl  $LIBS"
-cat confdefs.h - <<_ACEOF >conftest.$ac_ext
-/* end confdefs.h.  */
-
-/* Override any GCC internal prototype to avoid an error.
-   Use char because int might match the return type of a GCC
-   builtin and then its argument prototype would still apply.  */
-#ifdef __cplusplus
-extern "C"
-#endif
-char dlopen ();
-int
-main ()
-{
-return dlopen ();
-  ;
-  return 0;
-}
-_ACEOF
-if ac_fn_c_try_link "$LINENO"; then :
-  ac_cv_lib_dl_dlopen=yes
-else
-  ac_cv_lib_dl_dlopen=no
-fi
-rm -f core conftest.err conftest.$ac_objext \
-    conftest$ac_exeext conftest.$ac_ext
-LIBS=$ac_check_lib_save_LIBS
-fi
-{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_cv_lib_dl_dlopen" >&5
-$as_echo "$ac_cv_lib_dl_dlopen" >&6; }
-if test "x$ac_cv_lib_dl_dlopen" = xyes; then :
-  lt_cv_dlopen="dlopen" lt_cv_dlopen_libs="-ldl"
-else
-
-    lt_cv_dlopen="dyld"
-    lt_cv_dlopen_libs=
-    lt_cv_dlopen_self=yes
-
-fi
-
-    ;;
-
-  *)
-    ac_fn_c_check_func "$LINENO" "shl_load" "ac_cv_func_shl_load"
-if test "x$ac_cv_func_shl_load" = xyes; then :
-  lt_cv_dlopen="shl_load"
-else
-  { $as_echo "$as_me:${as_lineno-$LINENO}: checking for shl_load in -ldld" >&5
-$as_echo_n "checking for shl_load in -ldld... " >&6; }
-if ${ac_cv_lib_dld_shl_load+:} false; then :
-  $as_echo_n "(cached) " >&6
-else
-  ac_check_lib_save_LIBS=$LIBS
-LIBS="-ldld  $LIBS"
-cat confdefs.h - <<_ACEOF >conftest.$ac_ext
-/* end confdefs.h.  */
-
-/* Override any GCC internal prototype to avoid an error.
-   Use char because int might match the return type of a GCC
-   builtin and then its argument prototype would still apply.  */
-#ifdef __cplusplus
-extern "C"
-#endif
-char shl_load ();
-int
-main ()
-{
-return shl_load ();
-  ;
-  return 0;
-}
-_ACEOF
-if ac_fn_c_try_link "$LINENO"; then :
-  ac_cv_lib_dld_shl_load=yes
-else
-  ac_cv_lib_dld_shl_load=no
-fi
-rm -f core conftest.err conftest.$ac_objext \
-    conftest$ac_exeext conftest.$ac_ext
-LIBS=$ac_check_lib_save_LIBS
-fi
-{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_cv_lib_dld_shl_load" >&5
-$as_echo "$ac_cv_lib_dld_shl_load" >&6; }
-if test "x$ac_cv_lib_dld_shl_load" = xyes; then :
-  lt_cv_dlopen="shl_load" lt_cv_dlopen_libs="-ldld"
-else
-  ac_fn_c_check_func "$LINENO" "dlopen" "ac_cv_func_dlopen"
-if test "x$ac_cv_func_dlopen" = xyes; then :
-  lt_cv_dlopen="dlopen"
-else
-  { $as_echo "$as_me:${as_lineno-$LINENO}: checking for dlopen in -ldl" >&5
-$as_echo_n "checking for dlopen in -ldl... " >&6; }
-if ${ac_cv_lib_dl_dlopen+:} false; then :
-  $as_echo_n "(cached) " >&6
-else
-  ac_check_lib_save_LIBS=$LIBS
-LIBS="-ldl  $LIBS"
-cat confdefs.h - <<_ACEOF >conftest.$ac_ext
-/* end confdefs.h.  */
-
-/* Override any GCC internal prototype to avoid an error.
-   Use char because int might match the return type of a GCC
-   builtin and then its argument prototype would still apply.  */
-#ifdef __cplusplus
-extern "C"
-#endif
-char dlopen ();
-int
-main ()
-{
-return dlopen ();
-  ;
-  return 0;
-}
-_ACEOF
-if ac_fn_c_try_link "$LINENO"; then :
-  ac_cv_lib_dl_dlopen=yes
-else
-  ac_cv_lib_dl_dlopen=no
-fi
-rm -f core conftest.err conftest.$ac_objext \
-    conftest$ac_exeext conftest.$ac_ext
-LIBS=$ac_check_lib_save_LIBS
-fi
-{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_cv_lib_dl_dlopen" >&5
-$as_echo "$ac_cv_lib_dl_dlopen" >&6; }
-if test "x$ac_cv_lib_dl_dlopen" = xyes; then :
-  lt_cv_dlopen="dlopen" lt_cv_dlopen_libs="-ldl"
-else
-  { $as_echo "$as_me:${as_lineno-$LINENO}: checking for dlopen in -lsvld" >&5
-$as_echo_n "checking for dlopen in -lsvld... " >&6; }
-if ${ac_cv_lib_svld_dlopen+:} false; then :
-  $as_echo_n "(cached) " >&6
-else
-  ac_check_lib_save_LIBS=$LIBS
-LIBS="-lsvld  $LIBS"
-cat confdefs.h - <<_ACEOF >conftest.$ac_ext
-/* end confdefs.h.  */
-
-/* Override any GCC internal prototype to avoid an error.
-   Use char because int might match the return type of a GCC
-   builtin and then its argument prototype would still apply.  */
-#ifdef __cplusplus
-extern "C"
-#endif
-char dlopen ();
-int
-main ()
-{
-return dlopen ();
-  ;
-  return 0;
-}
-_ACEOF
-if ac_fn_c_try_link "$LINENO"; then :
-  ac_cv_lib_svld_dlopen=yes
-else
-  ac_cv_lib_svld_dlopen=no
-fi
-rm -f core conftest.err conftest.$ac_objext \
-    conftest$ac_exeext conftest.$ac_ext
-LIBS=$ac_check_lib_save_LIBS
-fi
-{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_cv_lib_svld_dlopen" >&5
-$as_echo "$ac_cv_lib_svld_dlopen" >&6; }
-if test "x$ac_cv_lib_svld_dlopen" = xyes; then :
-  lt_cv_dlopen="dlopen" lt_cv_dlopen_libs="-lsvld"
-else
-  { $as_echo "$as_me:${as_lineno-$LINENO}: checking for dld_link in -ldld" >&5
-$as_echo_n "checking for dld_link in -ldld... " >&6; }
-if ${ac_cv_lib_dld_dld_link+:} false; then :
-  $as_echo_n "(cached) " >&6
-else
-  ac_check_lib_save_LIBS=$LIBS
-LIBS="-ldld  $LIBS"
-cat confdefs.h - <<_ACEOF >conftest.$ac_ext
-/* end confdefs.h.  */
-
-/* Override any GCC internal prototype to avoid an error.
-   Use char because int might match the return type of a GCC
-   builtin and then its argument prototype would still apply.  */
-#ifdef __cplusplus
-extern "C"
-#endif
-char dld_link ();
-int
-main ()
-{
-return dld_link ();
-  ;
-  return 0;
-}
-_ACEOF
-if ac_fn_c_try_link "$LINENO"; then :
-  ac_cv_lib_dld_dld_link=yes
-else
-  ac_cv_lib_dld_dld_link=no
-fi
-rm -f core conftest.err conftest.$ac_objext \
-    conftest$ac_exeext conftest.$ac_ext
-LIBS=$ac_check_lib_save_LIBS
-fi
-{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_cv_lib_dld_dld_link" >&5
-$as_echo "$ac_cv_lib_dld_dld_link" >&6; }
-if test "x$ac_cv_lib_dld_dld_link" = xyes; then :
-  lt_cv_dlopen="dld_link" lt_cv_dlopen_libs="-ldld"
-fi
-
-
-fi
-
-
-fi
-
-
-fi
-
-
-fi
-
-
-fi
-
-    ;;
-  esac
-
-  if test "x$lt_cv_dlopen" != xno; then
-    enable_dlopen=yes
-  else
-    enable_dlopen=no
-  fi
-
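The cascade above tries each platform's dynamic-loading primitive in turn — load_add_on (BeOS), LoadLibrary (Windows), dlopen (possibly via -ldl or -lsvld), shl_load (HP-UX, -ldld), and dld_link — recording the winner in lt_cv_dlopen and any required library in lt_cv_dlopen_libs. A hand probe of the dlopen leg might look like this (illustrative only; assumes a working C compiler named cc):

    # Hypothetical manual probe, mirroring the link tests above:
    echo 'char dlopen(); int main() { return dlopen(); }' > t.c
    for lib in "" -ldl -lsvld; do
      if cc t.c $lib -o t 2>/dev/null; then echo "dlopen links with: ${lib:-libc alone}"; break; fi
    done
    rm -f t.c t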
-  case $lt_cv_dlopen in
-  dlopen)
-    save_CPPFLAGS="$CPPFLAGS"
-    test "x$ac_cv_header_dlfcn_h" = xyes && CPPFLAGS="$CPPFLAGS -DHAVE_DLFCN_H"
-
-    save_LDFLAGS="$LDFLAGS"
-    wl=$lt_prog_compiler_wl eval LDFLAGS=\"\$LDFLAGS $export_dynamic_flag_spec\"
-
-    save_LIBS="$LIBS"
-    LIBS="$lt_cv_dlopen_libs $LIBS"
-
-    { $as_echo "$as_me:${as_lineno-$LINENO}: checking whether a program can dlopen itself" >&5
-$as_echo_n "checking whether a program can dlopen itself... " >&6; }
-if ${lt_cv_dlopen_self+:} false; then :
-  $as_echo_n "(cached) " >&6
-else
-     if test "$cross_compiling" = yes; then :
-  lt_cv_dlopen_self=cross
-else
-  lt_dlunknown=0; lt_dlno_uscore=1; lt_dlneed_uscore=2
-  lt_status=$lt_dlunknown
-  cat > conftest.$ac_ext <<_LT_EOF
-#line $LINENO "configure"
-#include "confdefs.h"
-
-#if HAVE_DLFCN_H
-#include <dlfcn.h>
-#endif
-
-#include <stdio.h>
-
-#ifdef RTLD_GLOBAL
-#  define LT_DLGLOBAL		RTLD_GLOBAL
-#else
-#  ifdef DL_GLOBAL
-#    define LT_DLGLOBAL		DL_GLOBAL
-#  else
-#    define LT_DLGLOBAL		0
-#  endif
-#endif
-
-/* We may have to define LT_DLLAZY_OR_NOW on the command line if we
-   find out it does not work on some platform. */
-#ifndef LT_DLLAZY_OR_NOW
-#  ifdef RTLD_LAZY
-#    define LT_DLLAZY_OR_NOW		RTLD_LAZY
-#  else
-#    ifdef DL_LAZY
-#      define LT_DLLAZY_OR_NOW		DL_LAZY
-#    else
-#      ifdef RTLD_NOW
-#        define LT_DLLAZY_OR_NOW	RTLD_NOW
-#      else
-#        ifdef DL_NOW
-#          define LT_DLLAZY_OR_NOW	DL_NOW
-#        else
-#          define LT_DLLAZY_OR_NOW	0
-#        endif
-#      endif
-#    endif
-#  endif
-#endif
-
-/* When -fvisibility=hidden is used, assume the code has been annotated
-   correspondingly for the symbols needed.  */
-#if defined(__GNUC__) && (((__GNUC__ == 3) && (__GNUC_MINOR__ >= 3)) || (__GNUC__ > 3))
-int fnord () __attribute__((visibility("default")));
-#endif
-
-int fnord () { return 42; }
-int main ()
-{
-  void *self = dlopen (0, LT_DLGLOBAL|LT_DLLAZY_OR_NOW);
-  int status = $lt_dlunknown;
-
-  if (self)
-    {
-      if (dlsym (self,"fnord"))       status = $lt_dlno_uscore;
-      else
-        {
-	  if (dlsym( self,"_fnord"))  status = $lt_dlneed_uscore;
-          else puts (dlerror ());
-	}
-      /* dlclose (self); */
-    }
-  else
-    puts (dlerror ());
-
-  return status;
-}
-_LT_EOF
-  if { { eval echo "\"\$as_me\":${as_lineno-$LINENO}: \"$ac_link\""; } >&5
-  (eval $ac_link) 2>&5
-  ac_status=$?
-  $as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5
-  test $ac_status = 0; } && test -s conftest${ac_exeext} 2>/dev/null; then
-    (./conftest; exit; ) >&5 2>/dev/null
-    lt_status=$?
-    case x$lt_status in
-      x$lt_dlno_uscore) lt_cv_dlopen_self=yes ;;
-      x$lt_dlneed_uscore) lt_cv_dlopen_self=yes ;;
-      x$lt_dlunknown|x*) lt_cv_dlopen_self=no ;;
-    esac
-  else :
-    # compilation failed
-    lt_cv_dlopen_self=no
-  fi
-fi
-rm -fr conftest*
-
-
-fi
-{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $lt_cv_dlopen_self" >&5
-$as_echo "$lt_cv_dlopen_self" >&6; }
-
-    if test "x$lt_cv_dlopen_self" = xyes; then
-      wl=$lt_prog_compiler_wl eval LDFLAGS=\"\$LDFLAGS $lt_prog_compiler_static\"
-      { $as_echo "$as_me:${as_lineno-$LINENO}: checking whether a statically linked program can dlopen itself" >&5
-$as_echo_n "checking whether a statically linked program can dlopen itself... " >&6; }
-if ${lt_cv_dlopen_self_static+:} false; then :
-  $as_echo_n "(cached) " >&6
-else
-     if test "$cross_compiling" = yes; then :
-  lt_cv_dlopen_self_static=cross
-else
-  lt_dlunknown=0; lt_dlno_uscore=1; lt_dlneed_uscore=2
-  lt_status=$lt_dlunknown
-  cat > conftest.$ac_ext <<_LT_EOF
-#line $LINENO "configure"
-#include "confdefs.h"
-
-#if HAVE_DLFCN_H
-#include <dlfcn.h>
-#endif
-
-#include <stdio.h>
-
-#ifdef RTLD_GLOBAL
-#  define LT_DLGLOBAL		RTLD_GLOBAL
-#else
-#  ifdef DL_GLOBAL
-#    define LT_DLGLOBAL		DL_GLOBAL
-#  else
-#    define LT_DLGLOBAL		0
-#  endif
-#endif
-
-/* We may have to define LT_DLLAZY_OR_NOW on the command line if we
-   find out it does not work on some platform. */
-#ifndef LT_DLLAZY_OR_NOW
-#  ifdef RTLD_LAZY
-#    define LT_DLLAZY_OR_NOW		RTLD_LAZY
-#  else
-#    ifdef DL_LAZY
-#      define LT_DLLAZY_OR_NOW		DL_LAZY
-#    else
-#      ifdef RTLD_NOW
-#        define LT_DLLAZY_OR_NOW	RTLD_NOW
-#      else
-#        ifdef DL_NOW
-#          define LT_DLLAZY_OR_NOW	DL_NOW
-#        else
-#          define LT_DLLAZY_OR_NOW	0
-#        endif
-#      endif
-#    endif
-#  endif
-#endif
-
-/* When -fvisibility=hidden is used, assume the code has been annotated
-   correspondingly for the symbols needed.  */
-#if defined(__GNUC__) && (((__GNUC__ == 3) && (__GNUC_MINOR__ >= 3)) || (__GNUC__ > 3))
-int fnord () __attribute__((visibility("default")));
-#endif
-
-int fnord () { return 42; }
-int main ()
-{
-  void *self = dlopen (0, LT_DLGLOBAL|LT_DLLAZY_OR_NOW);
-  int status = $lt_dlunknown;
-
-  if (self)
-    {
-      if (dlsym (self,"fnord"))       status = $lt_dlno_uscore;
-      else
-        {
-	  if (dlsym( self,"_fnord"))  status = $lt_dlneed_uscore;
-          else puts (dlerror ());
-	}
-      /* dlclose (self); */
-    }
-  else
-    puts (dlerror ());
-
-  return status;
-}
-_LT_EOF
-  if { { eval echo "\"\$as_me\":${as_lineno-$LINENO}: \"$ac_link\""; } >&5
-  (eval $ac_link) 2>&5
-  ac_status=$?
-  $as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5
-  test $ac_status = 0; } && test -s conftest${ac_exeext} 2>/dev/null; then
-    (./conftest; exit; ) >&5 2>/dev/null
-    lt_status=$?
-    case x$lt_status in
-      x$lt_dlno_uscore) lt_cv_dlopen_self_static=yes ;;
-      x$lt_dlneed_uscore) lt_cv_dlopen_self_static=yes ;;
-      x$lt_dlunknown|x*) lt_cv_dlopen_self_static=no ;;
-    esac
-  else :
-    # compilation failed
-    lt_cv_dlopen_self_static=no
-  fi
-fi
-rm -fr conftest*
-
-
-fi
-{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $lt_cv_dlopen_self_static" >&5
-$as_echo "$lt_cv_dlopen_self_static" >&6; }
-    fi
-
-    CPPFLAGS="$save_CPPFLAGS"
-    LDFLAGS="$save_LDFLAGS"
-    LIBS="$save_LIBS"
-    ;;
-  esac
-
-  case $lt_cv_dlopen_self in
-  yes|no) enable_dlopen_self=$lt_cv_dlopen_self ;;
-  *) enable_dlopen_self=unknown ;;
-  esac
-
-  case $lt_cv_dlopen_self_static in
-  yes|no) enable_dlopen_self_static=$lt_cv_dlopen_self_static ;;
-  *) enable_dlopen_self_static=unknown ;;
-  esac
-fi
-
-striplib=
-old_striplib=
-{ $as_echo "$as_me:${as_lineno-$LINENO}: checking whether stripping libraries is possible" >&5
-$as_echo_n "checking whether stripping libraries is possible... " >&6; }
-if test -n "$STRIP" && $STRIP -V 2>&1 | $GREP "GNU strip" >/dev/null; then
-  test -z "$old_striplib" && old_striplib="$STRIP --strip-debug"
-  test -z "$striplib" && striplib="$STRIP --strip-unneeded"
-  { $as_echo "$as_me:${as_lineno-$LINENO}: result: yes" >&5
-$as_echo "yes" >&6; }
-else
-# FIXME - insert some real tests, host_os isn't really good enough
-  case $host_os in
-  darwin*)
-    if test -n "$STRIP" ; then
-      striplib="$STRIP -x"
-      old_striplib="$STRIP -S"
-      { $as_echo "$as_me:${as_lineno-$LINENO}: result: yes" >&5
-$as_echo "yes" >&6; }
-    else
-      { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5
-$as_echo "no" >&6; }
-    fi
-    ;;
-  *)
-    { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5
-$as_echo "no" >&6; }
-    ;;
-  esac
-fi
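Once probed, old_striplib applies to static archives and striplib to shared libraries; both leave the symbols the dynamic linker still needs. Hypothetical use (library names invented):

    # GNU strip:  old_striplib="strip --strip-debug", striplib="strip --strip-unneeded"
    # Darwin:     old_striplib="strip -S",            striplib="strip -x"
    eval "$old_striplib libfoo.a"
    eval "$striplib libfoo.so"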
-
-  # Report which library types will actually be built
-  { $as_echo "$as_me:${as_lineno-$LINENO}: checking if libtool supports shared libraries" >&5
-$as_echo_n "checking if libtool supports shared libraries... " >&6; }
-  { $as_echo "$as_me:${as_lineno-$LINENO}: result: $can_build_shared" >&5
-$as_echo "$can_build_shared" >&6; }
-
-  { $as_echo "$as_me:${as_lineno-$LINENO}: checking whether to build shared libraries" >&5
-$as_echo_n "checking whether to build shared libraries... " >&6; }
-  test "$can_build_shared" = "no" && enable_shared=no
-
-  # On AIX, shared libraries and static libraries use the same namespace, and
-  # are all built from PIC.
-  case $host_os in
-  aix3*)
-    test "$enable_shared" = yes && enable_static=no
-    if test -n "$RANLIB"; then
-      archive_cmds="$archive_cmds~\$RANLIB \$lib"
-      postinstall_cmds='$RANLIB $lib'
-    fi
-    ;;
-
-  aix[4-9]*)
-    if test "$host_cpu" != ia64 && test "$aix_use_runtimelinking" = no ; then
-      test "$enable_shared" = yes && enable_static=no
-    fi
-    ;;
-  esac
-  { $as_echo "$as_me:${as_lineno-$LINENO}: result: $enable_shared" >&5
-$as_echo "$enable_shared" >&6; }
-
-  { $as_echo "$as_me:${as_lineno-$LINENO}: checking whether to build static libraries" >&5
-$as_echo_n "checking whether to build static libraries... " >&6; }
-  # Make sure either enable_shared or enable_static is yes.
-  test "$enable_shared" = yes || enable_static=yes
-  { $as_echo "$as_me:${as_lineno-$LINENO}: result: $enable_static" >&5
-$as_echo "$enable_static" >&6; }
-
-
-
-
-fi
-ac_ext=c
-ac_cpp='$CPP $CPPFLAGS'
-ac_compile='$CC -c $CFLAGS $CPPFLAGS conftest.$ac_ext >&5'
-ac_link='$CC -o conftest$ac_exeext $CFLAGS $CPPFLAGS $LDFLAGS conftest.$ac_ext $LIBS >&5'
-ac_compiler_gnu=$ac_cv_c_compiler_gnu
-
-CC="$lt_save_CC"
-
-
-
-
-
-      ac_ext=${ac_fc_srcext-f}
-ac_compile='$FC -c $FCFLAGS $ac_fcflags_srcext conftest.$ac_ext >&5'
-ac_link='$FC -o conftest$ac_exeext $FCFLAGS $LDFLAGS $ac_fcflags_srcext conftest.$ac_ext $LIBS >&5'
-ac_compiler_gnu=$ac_cv_fc_compiler_gnu
-
-
-if test -z "$FC" || test "X$FC" = "Xno"; then
-  _lt_disable_FC=yes
-fi
-
-archive_cmds_need_lc_FC=no
-allow_undefined_flag_FC=
-always_export_symbols_FC=no
-archive_expsym_cmds_FC=
-export_dynamic_flag_spec_FC=
-hardcode_direct_FC=no
-hardcode_direct_absolute_FC=no
-hardcode_libdir_flag_spec_FC=
-hardcode_libdir_separator_FC=
-hardcode_minus_L_FC=no
-hardcode_automatic_FC=no
-inherit_rpath_FC=no
-module_cmds_FC=
-module_expsym_cmds_FC=
-link_all_deplibs_FC=unknown
-old_archive_cmds_FC=$old_archive_cmds
-reload_flag_FC=$reload_flag
-reload_cmds_FC=$reload_cmds
-no_undefined_flag_FC=
-whole_archive_flag_spec_FC=
-enable_shared_with_static_runtimes_FC=no
-
-# Source file extension for fc test sources.
-ac_ext=${ac_fc_srcext-f}
-
-# Object file extension for compiled fc test sources.
-objext=o
-objext_FC=$objext
-
-# No sense in running all these tests if we already determined that
-# the FC compiler isn't working.  Some variables (like enable_shared)
-# are currently assumed to apply to all compilers on this platform,
-# and will be corrupted by setting them based on a non-working compiler.
-if test "$_lt_disable_FC" != yes; then
-  # Code to be used in simple compile tests
-  lt_simple_compile_test_code="\
-      subroutine t
-      return
-      end
-"
-
-  # Code to be used in simple link tests
-  lt_simple_link_test_code="\
-      program t
-      end
-"
-
-  # ltmain only uses $CC for tagged configurations so make sure $CC is set.
-
-
-
-
-
-
-# If no C compiler was specified, use CC.
-LTCC=${LTCC-"$CC"}
-
-# If no C compiler flags were specified, use CFLAGS.
-LTCFLAGS=${LTCFLAGS-"$CFLAGS"}
-
-# Allow CC to be a program name with arguments.
-compiler=$CC
-
-
-  # save warnings/boilerplate of simple test code
-  ac_outfile=conftest.$ac_objext
-echo "$lt_simple_compile_test_code" >conftest.$ac_ext
-eval "$ac_compile" 2>&1 >/dev/null | $SED '/^$/d; /^ *+/d' >conftest.err
-_lt_compiler_boilerplate=`cat conftest.err`
-$RM conftest*
-
-  ac_outfile=conftest.$ac_objext
-echo "$lt_simple_link_test_code" >conftest.$ac_ext
-eval "$ac_link" 2>&1 >/dev/null | $SED '/^$/d; /^ *+/d' >conftest.err
-_lt_linker_boilerplate=`cat conftest.err`
-$RM -r conftest*
-
-
-  # Allow CC to be a program name with arguments.
-  lt_save_CC="$CC"
-  lt_save_GCC=$GCC
-  lt_save_CFLAGS=$CFLAGS
-  CC=${FC-"f95"}
-  CFLAGS=$FCFLAGS
-  compiler=$CC
-  GCC=$ac_cv_fc_compiler_gnu
-
-  compiler_FC=$CC
-  for cc_temp in $compiler""; do
-  case $cc_temp in
-    compile | *[\\/]compile | ccache | *[\\/]ccache ) ;;
-    distcc | *[\\/]distcc | purify | *[\\/]purify ) ;;
-    \-*) ;;
-    *) break;;
-  esac
-done
-cc_basename=`$ECHO "$cc_temp" | $SED "s%.*/%%; s%^$host_alias-%%"`
-
-
-  if test -n "$compiler"; then
-    { $as_echo "$as_me:${as_lineno-$LINENO}: checking if libtool supports shared libraries" >&5
-$as_echo_n "checking if libtool supports shared libraries... " >&6; }
-    { $as_echo "$as_me:${as_lineno-$LINENO}: result: $can_build_shared" >&5
-$as_echo "$can_build_shared" >&6; }
-
-    { $as_echo "$as_me:${as_lineno-$LINENO}: checking whether to build shared libraries" >&5
-$as_echo_n "checking whether to build shared libraries... " >&6; }
-    test "$can_build_shared" = "no" && enable_shared=no
-
-    # On AIX, shared libraries and static libraries use the same namespace, and
-    # are all built from PIC.
-    case $host_os in
-      aix3*)
-        test "$enable_shared" = yes && enable_static=no
-        if test -n "$RANLIB"; then
-          archive_cmds="$archive_cmds~\$RANLIB \$lib"
-          postinstall_cmds='$RANLIB $lib'
-        fi
-        ;;
-      aix[4-9]*)
-	if test "$host_cpu" != ia64 && test "$aix_use_runtimelinking" = no ; then
-	  test "$enable_shared" = yes && enable_static=no
-	fi
-        ;;
-    esac
-    { $as_echo "$as_me:${as_lineno-$LINENO}: result: $enable_shared" >&5
-$as_echo "$enable_shared" >&6; }
-
-    { $as_echo "$as_me:${as_lineno-$LINENO}: checking whether to build static libraries" >&5
-$as_echo_n "checking whether to build static libraries... " >&6; }
-    # Make sure either enable_shared or enable_static is yes.
-    test "$enable_shared" = yes || enable_static=yes
-    { $as_echo "$as_me:${as_lineno-$LINENO}: result: $enable_static" >&5
-$as_echo "$enable_static" >&6; }
-
-    GCC_FC="$ac_cv_fc_compiler_gnu"
-    LD_FC="$LD"
-
-    ## CAVEAT EMPTOR:
-    ## There is no encapsulation within the following macros, do not change
-    ## the running order or otherwise move them around unless you know exactly
-    ## what you are doing...
-    # Dependencies to place before and after the object being linked:
-predep_objects_FC=
-postdep_objects_FC=
-predeps_FC=
-postdeps_FC=
-compiler_lib_search_path_FC=
-
-cat > conftest.$ac_ext <<_LT_EOF
-      subroutine foo
-      implicit none
-      integer a
-      a=0
-      return
-      end
-_LT_EOF
-
-
-_lt_libdeps_save_CFLAGS=$CFLAGS
-case "$CC $CFLAGS " in #(
-*\ -flto*\ *) CFLAGS="$CFLAGS -fno-lto" ;;
-*\ -fwhopr*\ *) CFLAGS="$CFLAGS -fno-whopr" ;;
-*\ -fuse-linker-plugin*\ *) CFLAGS="$CFLAGS -fno-use-linker-plugin" ;;
-esac
-
-if { { eval echo "\"\$as_me\":${as_lineno-$LINENO}: \"$ac_compile\""; } >&5
-  (eval $ac_compile) 2>&5
-  ac_status=$?
-  $as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5
-  test $ac_status = 0; }; then
-  # Parse the compiler output and extract the necessary
-  # objects, libraries and library flags.
-
-  # Sentinel used to keep track of whether or not we are before
-  # the conftest object file.
-  pre_test_object_deps_done=no
-
-  for p in `eval "$output_verbose_link_cmd"`; do
-    case ${prev}${p} in
-
-    -L* | -R* | -l*)
-       # Some compilers place space between "-{L,R}" and the path.
-       # Remove the space.
-       if test $p = "-L" ||
-          test $p = "-R"; then
-	 prev=$p
-	 continue
-       fi
-
-       # Expand the sysroot to ease extracting the directories later.
-       if test -z "$prev"; then
-         case $p in
-         -L*) func_stripname_cnf '-L' '' "$p"; prev=-L; p=$func_stripname_result ;;
-         -R*) func_stripname_cnf '-R' '' "$p"; prev=-R; p=$func_stripname_result ;;
-         -l*) func_stripname_cnf '-l' '' "$p"; prev=-l; p=$func_stripname_result ;;
-         esac
-       fi
-       case $p in
-       =*) func_stripname_cnf '=' '' "$p"; p=$lt_sysroot$func_stripname_result ;;
-       esac
-       if test "$pre_test_object_deps_done" = no; then
-	 case ${prev} in
-	 -L | -R)
-	   # Internal compiler library paths should come after those
-	   # provided by the user.  The postdeps already come after the
-	   # user supplied libs so there is no need to process them.
-	   if test -z "$compiler_lib_search_path_FC"; then
-	     compiler_lib_search_path_FC="${prev}${p}"
-	   else
-	     compiler_lib_search_path_FC="${compiler_lib_search_path_FC} ${prev}${p}"
-	   fi
-	   ;;
-	 # The "-l" case would never come before the object being
-	 # linked, so don't bother handling this case.
-	 esac
-       else
-	 if test -z "$postdeps_FC"; then
-	   postdeps_FC="${prev}${p}"
-	 else
-	   postdeps_FC="${postdeps_FC} ${prev}${p}"
-	 fi
-       fi
-       prev=
-       ;;
-
-    *.lto.$objext) ;; # Ignore GCC LTO objects
-    *.$objext)
-       # This assumes that the test object file only shows up
-       # once in the compiler output.
-       if test "$p" = "conftest.$objext"; then
-	 pre_test_object_deps_done=yes
-	 continue
-       fi
-
-       if test "$pre_test_object_deps_done" = no; then
-	 if test -z "$predep_objects_FC"; then
-	   predep_objects_FC="$p"
-	 else
-	   predep_objects_FC="$predep_objects_FC $p"
-	 fi
-       else
-	 if test -z "$postdep_objects_FC"; then
-	   postdep_objects_FC="$p"
-	 else
-	   postdep_objects_FC="$postdep_objects_FC $p"
-	 fi
-       fi
-       ;;
-
-    *) ;; # Ignore the rest.
-
-    esac
-  done
-
-  # Clean up.
-  rm -f a.out a.exe
-else
-  echo "libtool.m4: error: problem compiling FC test program"
-fi
-
-$RM -f conftest.$objext
-CFLAGS=$_lt_libdeps_save_CFLAGS
-
-# PORTME: override above test on systems where it is broken
-
-
-case " $postdeps_FC " in
-*" -lc "*) archive_cmds_need_lc_FC=no ;;
-esac
- compiler_lib_search_dirs_FC=
-if test -n "${compiler_lib_search_path_FC}"; then
- compiler_lib_search_dirs_FC=`echo " ${compiler_lib_search_path_FC}" | ${SED} -e 's! -L! !g' -e 's!^ !!'`
-fi
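The parse above splits the compiler's verbose link line into -L paths seen before the conftest object (compiler_lib_search_path_FC) and trailing libraries and objects (postdeps_FC, postdep_objects_FC). For gfortran the outcome typically resembles the following (paths and versions hypothetical):

    # compiler_lib_search_path_FC: -L/usr/lib/gcc/x86_64-linux-gnu/4.9
    # postdeps_FC:                 -lgfortran -lm -lquadmath -lgcc_s
    # and the -L prefixes are stripped to get the bare directories, as above:
    echo " $compiler_lib_search_path_FC" | sed -e 's! -L! !g' -e 's!^ !!'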
-
-    lt_prog_compiler_wl_FC=
-lt_prog_compiler_pic_FC=
-lt_prog_compiler_static_FC=
-
-
-  if test "$GCC" = yes; then
-    lt_prog_compiler_wl_FC='-Wl,'
-    lt_prog_compiler_static_FC='-static'
-
-    case $host_os in
-      aix*)
-      # All AIX code is PIC.
-      if test "$host_cpu" = ia64; then
-	# AIX 5 now supports IA64 processor
-	lt_prog_compiler_static_FC='-Bstatic'
-      fi
-      ;;
-
-    amigaos*)
-      case $host_cpu in
-      powerpc)
-            # see comment about AmigaOS4 .so support
-            lt_prog_compiler_pic_FC='-fPIC'
-        ;;
-      m68k)
-            # FIXME: we need at least 68020 code to build shared libraries, but
-            # adding the `-m68020' flag to GCC prevents building anything better,
-            # like `-m68040'.
-            lt_prog_compiler_pic_FC='-m68020 -resident32 -malways-restore-a4'
-        ;;
-      esac
-      ;;
-
-    beos* | irix5* | irix6* | nonstopux* | osf3* | osf4* | osf5*)
-      # PIC is the default for these OSes.
-      ;;
-
-    mingw* | cygwin* | pw32* | os2* | cegcc*)
-      # This hack is so that the source file can tell whether it is being
-      # built for inclusion in a dll (and should export symbols for example).
-      # Although the cygwin gcc ignores -fPIC, still need this for old-style
-      # (--disable-auto-import) libraries
-      lt_prog_compiler_pic_FC='-DDLL_EXPORT'
-      ;;
-
-    darwin* | rhapsody*)
-      # PIC is the default on this platform
-      # Common symbols not allowed in MH_DYLIB files
-      lt_prog_compiler_pic_FC='-fno-common'
-      ;;
-
-    haiku*)
-      # PIC is the default for Haiku.
-      # The "-static" flag exists, but is broken.
-      lt_prog_compiler_static_FC=
-      ;;
-
-    hpux*)
-      # PIC is the default for 64-bit PA HP-UX, but not for 32-bit
-      # PA HP-UX.  On IA64 HP-UX, PIC is the default but the pic flag
-      # sets the default TLS model and affects inlining.
-      case $host_cpu in
-      hppa*64*)
-	# +Z the default
-	;;
-      *)
-	lt_prog_compiler_pic_FC='-fPIC'
-	;;
-      esac
-      ;;
-
-    interix[3-9]*)
-      # Interix 3.x gcc -fpic/-fPIC options generate broken code.
-      # Instead, we relocate shared libraries at runtime.
-      ;;
-
-    msdosdjgpp*)
-      # Just because we use GCC doesn't mean we suddenly get shared libraries
-      # on systems that don't support them.
-      lt_prog_compiler_can_build_shared_FC=no
-      enable_shared=no
-      ;;
-
-    *nto* | *qnx*)
-      # QNX uses GNU C++, but we need to define the -shared option too,
-      # otherwise it will core dump.
-      lt_prog_compiler_pic_FC='-fPIC -shared'
-      ;;
-
-    sysv4*MP*)
-      if test -d /usr/nec; then
-	lt_prog_compiler_pic_FC=-Kconform_pic
-      fi
-      ;;
-
-    *)
-      lt_prog_compiler_pic_FC='-fPIC'
-      ;;
-    esac
-
-    case $cc_basename in
-    nvcc*) # Cuda Compiler Driver 2.2
-      lt_prog_compiler_wl_FC='-Xlinker '
-      if test -n "$lt_prog_compiler_pic_FC"; then
-        lt_prog_compiler_pic_FC="-Xcompiler $lt_prog_compiler_pic_FC"
-      fi
-      ;;
-    esac
-  else
-    # PORTME Check for flag to pass linker flags through the system compiler.
-    case $host_os in
-    aix*)
-      lt_prog_compiler_wl_FC='-Wl,'
-      if test "$host_cpu" = ia64; then
-	# AIX 5 now supports IA64 processor
-	lt_prog_compiler_static_FC='-Bstatic'
-      else
-	lt_prog_compiler_static_FC='-bnso -bI:/lib/syscalls.exp'
-      fi
-      ;;
-
-    mingw* | cygwin* | pw32* | os2* | cegcc*)
-      # This hack is so that the source file can tell whether it is being
-      # built for inclusion in a dll (and should export symbols for example).
-      lt_prog_compiler_pic_FC='-DDLL_EXPORT'
-      ;;
-
-    hpux9* | hpux10* | hpux11*)
-      lt_prog_compiler_wl_FC='-Wl,'
-      # PIC is the default for IA64 HP-UX and 64-bit HP-UX, but
-      # not for PA HP-UX.
-      case $host_cpu in
-      hppa*64*|ia64*)
-	# +Z the default
-	;;
-      *)
-	lt_prog_compiler_pic_FC='+Z'
-	;;
-      esac
-      # Is there a better lt_prog_compiler_static that works with the bundled CC?
-      lt_prog_compiler_static_FC='${wl}-a ${wl}archive'
-      ;;
-
-    irix5* | irix6* | nonstopux*)
-      lt_prog_compiler_wl_FC='-Wl,'
-      # PIC (with -KPIC) is the default.
-      lt_prog_compiler_static_FC='-non_shared'
-      ;;
-
-    linux* | k*bsd*-gnu | kopensolaris*-gnu)
-      case $cc_basename in
-      # old Intel for x86_64 which still supported -KPIC.
-      ecc*)
-	lt_prog_compiler_wl_FC='-Wl,'
-	lt_prog_compiler_pic_FC='-KPIC'
-	lt_prog_compiler_static_FC='-static'
-        ;;
-      # icc used to be incompatible with GCC.
-      # ICC 10 doesn't accept -KPIC any more.
-      icc* | ifort*)
-	lt_prog_compiler_wl_FC='-Wl,'
-	lt_prog_compiler_pic_FC='-fPIC'
-	lt_prog_compiler_static_FC='-static'
-        ;;
-      # Lahey Fortran 8.1.
-      lf95*)
-	lt_prog_compiler_wl_FC='-Wl,'
-	lt_prog_compiler_pic_FC='--shared'
-	lt_prog_compiler_static_FC='--static'
-	;;
-      nagfor*)
-	# NAG Fortran compiler
-	lt_prog_compiler_wl_FC='-Wl,-Wl,,'
-	lt_prog_compiler_pic_FC='-PIC'
-	lt_prog_compiler_static_FC='-Bstatic'
-	;;
-      pgcc* | pgf77* | pgf90* | pgf95* | pgfortran*)
-        # Portland Group compilers (*not* the Pentium gcc compiler,
-	# which looks to be a dead project)
-	lt_prog_compiler_wl_FC='-Wl,'
-	lt_prog_compiler_pic_FC='-fpic'
-	lt_prog_compiler_static_FC='-Bstatic'
-        ;;
-      ccc*)
-        lt_prog_compiler_wl_FC='-Wl,'
-        # All Alpha code is PIC.
-        lt_prog_compiler_static_FC='-non_shared'
-        ;;
-      xl* | bgxl* | bgf* | mpixl*)
-	# IBM XL C 8.0/Fortran 10.1, 11.1 on PPC and BlueGene
-	lt_prog_compiler_wl_FC='-Wl,'
-	lt_prog_compiler_pic_FC='-qpic'
-	lt_prog_compiler_static_FC='-qstaticlink'
-	;;
-      *)
-	case `$CC -V 2>&1 | sed 5q` in
-	*Sun\ Ceres\ Fortran* | *Sun*Fortran*\ [1-7].* | *Sun*Fortran*\ 8.[0-3]*)
-	  # Sun Fortran 8.3 passes all unrecognized flags to the linker
-	  lt_prog_compiler_pic_FC='-KPIC'
-	  lt_prog_compiler_static_FC='-Bstatic'
-	  lt_prog_compiler_wl_FC=''
-	  ;;
-	*Sun\ F* | *Sun*Fortran*)
-	  lt_prog_compiler_pic_FC='-KPIC'
-	  lt_prog_compiler_static_FC='-Bstatic'
-	  lt_prog_compiler_wl_FC='-Qoption ld '
-	  ;;
-	*Sun\ C*)
-	  # Sun C 5.9
-	  lt_prog_compiler_pic_FC='-KPIC'
-	  lt_prog_compiler_static_FC='-Bstatic'
-	  lt_prog_compiler_wl_FC='-Wl,'
-	  ;;
-        *Intel*\ [CF]*Compiler*)
-	  lt_prog_compiler_wl_FC='-Wl,'
-	  lt_prog_compiler_pic_FC='-fPIC'
-	  lt_prog_compiler_static_FC='-static'
-	  ;;
-	*Portland\ Group*)
-	  lt_prog_compiler_wl_FC='-Wl,'
-	  lt_prog_compiler_pic_FC='-fpic'
-	  lt_prog_compiler_static_FC='-Bstatic'
-	  ;;
-	esac
-	;;
-      esac
-      ;;
-
-    newsos6)
-      lt_prog_compiler_pic_FC='-KPIC'
-      lt_prog_compiler_static_FC='-Bstatic'
-      ;;
-
-    *nto* | *qnx*)
-      # QNX uses GNU C++, but we need to define the -shared option too,
-      # otherwise it will core dump.
-      lt_prog_compiler_pic_FC='-fPIC -shared'
-      ;;
-
-    osf3* | osf4* | osf5*)
-      lt_prog_compiler_wl_FC='-Wl,'
-      # All OSF/1 code is PIC.
-      lt_prog_compiler_static_FC='-non_shared'
-      ;;
-
-    rdos*)
-      lt_prog_compiler_static_FC='-non_shared'
-      ;;
-
-    solaris*)
-      lt_prog_compiler_pic_FC='-KPIC'
-      lt_prog_compiler_static_FC='-Bstatic'
-      case $cc_basename in
-      f77* | f90* | f95* | sunf77* | sunf90* | sunf95*)
-	lt_prog_compiler_wl_FC='-Qoption ld ';;
-      *)
-	lt_prog_compiler_wl_FC='-Wl,';;
-      esac
-      ;;
-
-    sunos4*)
-      lt_prog_compiler_wl_FC='-Qoption ld '
-      lt_prog_compiler_pic_FC='-PIC'
-      lt_prog_compiler_static_FC='-Bstatic'
-      ;;
-
-    sysv4 | sysv4.2uw2* | sysv4.3*)
-      lt_prog_compiler_wl_FC='-Wl,'
-      lt_prog_compiler_pic_FC='-KPIC'
-      lt_prog_compiler_static_FC='-Bstatic'
-      ;;
-
-    sysv4*MP*)
-      if test -d /usr/nec ;then
-	lt_prog_compiler_pic_FC='-Kconform_pic'
-	lt_prog_compiler_static_FC='-Bstatic'
-      fi
-      ;;
-
-    sysv5* | unixware* | sco3.2v5* | sco5v6* | OpenUNIX*)
-      lt_prog_compiler_wl_FC='-Wl,'
-      lt_prog_compiler_pic_FC='-KPIC'
-      lt_prog_compiler_static_FC='-Bstatic'
-      ;;
-
-    unicos*)
-      lt_prog_compiler_wl_FC='-Wl,'
-      lt_prog_compiler_can_build_shared_FC=no
-      ;;
-
-    uts4*)
-      lt_prog_compiler_pic_FC='-pic'
-      lt_prog_compiler_static_FC='-Bstatic'
-      ;;
-
-    *)
-      lt_prog_compiler_can_build_shared_FC=no
-      ;;
-    esac
-  fi
-
-case $host_os in
-  # For platforms which do not support PIC, -DPIC is meaningless:
-  *djgpp*)
-    lt_prog_compiler_pic_FC=
-    ;;
-  *)
-    lt_prog_compiler_pic_FC="$lt_prog_compiler_pic_FC"
-    ;;
-esac
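Three knobs come out of this block: lt_prog_compiler_wl_FC (the prefix that smuggles an option through the compiler driver to the linker), the PIC flag, and the static flag. With the common GCC-style wl='-Wl,', a linker option is forwarded like so (option and path invented):

    wl='-Wl,'
    printf '%s\n' "${wl}-rpath ${wl}/opt/lib"
    # -> -Wl,-rpath -Wl,/opt/lib   (what $CC hands on to ld)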
-
-{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $compiler option to produce PIC" >&5
-$as_echo_n "checking for $compiler option to produce PIC... " >&6; }
-if ${lt_cv_prog_compiler_pic_FC+:} false; then :
-  $as_echo_n "(cached) " >&6
-else
-  lt_cv_prog_compiler_pic_FC=$lt_prog_compiler_pic_FC
-fi
-{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $lt_cv_prog_compiler_pic_FC" >&5
-$as_echo "$lt_cv_prog_compiler_pic_FC" >&6; }
-lt_prog_compiler_pic_FC=$lt_cv_prog_compiler_pic_FC
-
-#
-# Check to make sure the PIC flag actually works.
-#
-if test -n "$lt_prog_compiler_pic_FC"; then
-  { $as_echo "$as_me:${as_lineno-$LINENO}: checking if $compiler PIC flag $lt_prog_compiler_pic_FC works" >&5
-$as_echo_n "checking if $compiler PIC flag $lt_prog_compiler_pic_FC works... " >&6; }
-if ${lt_cv_prog_compiler_pic_works_FC+:} false; then :
-  $as_echo_n "(cached) " >&6
-else
-  lt_cv_prog_compiler_pic_works_FC=no
-   ac_outfile=conftest.$ac_objext
-   echo "$lt_simple_compile_test_code" > conftest.$ac_ext
-   lt_compiler_flag="$lt_prog_compiler_pic_FC"
-   # Insert the option either (1) after the last *FLAGS variable, or
-   # (2) before a word containing "conftest.", or (3) at the end.
-   # Note that $ac_compile itself does not contain backslashes and begins
-   # with a dollar sign (not a hyphen), so the echo should work correctly.
-   # The option is referenced via a variable to avoid confusing sed.
-   lt_compile=`echo "$ac_compile" | $SED \
-   -e 's:.*FLAGS}\{0,1\} :&$lt_compiler_flag :; t' \
-   -e 's: [^ ]*conftest\.: $lt_compiler_flag&:; t' \
-   -e 's:$: $lt_compiler_flag:'`
-   (eval echo "\"\$as_me:$LINENO: $lt_compile\"" >&5)
-   (eval "$lt_compile" 2>conftest.err)
-   ac_status=$?
-   cat conftest.err >&5
-   echo "$as_me:$LINENO: \$? = $ac_status" >&5
-   if (exit $ac_status) && test -s "$ac_outfile"; then
-     # The compiler can only warn and ignore the option if not recognized
-     # So say no if there are warnings other than the usual output.
-     $ECHO "$_lt_compiler_boilerplate" | $SED '/^$/d' >conftest.exp
-     $SED '/^$/d; /^ *+/d' conftest.err >conftest.er2
-     if test ! -s conftest.er2 || diff conftest.exp conftest.er2 >/dev/null; then
-       lt_cv_prog_compiler_pic_works_FC=yes
-     fi
-   fi
-   $RM conftest*
-
-fi
-{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $lt_cv_prog_compiler_pic_works_FC" >&5
-$as_echo "$lt_cv_prog_compiler_pic_works_FC" >&6; }
-
-if test x"$lt_cv_prog_compiler_pic_works_FC" = xyes; then
-    case $lt_prog_compiler_pic_FC in
-     "" | " "*) ;;
-     *) lt_prog_compiler_pic_FC=" $lt_prog_compiler_pic_FC" ;;
-     esac
-else
-    lt_prog_compiler_pic_FC=
-     lt_prog_compiler_can_build_shared_FC=no
-fi
-
-fi
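The sed pipeline in this check splices the flag under test into $ac_compile right after the last *FLAGS word (or, failing that, before the conftest argument, or at the end). Demonstrating the transformation on a representative compile line — the single-quoted sed leaves $lt_compiler_flag literal, to be expanded by the later eval:

    echo '$FC -c $FCFLAGS conftest.f >&5' | sed \
      -e 's:.*FLAGS}\{0,1\} :&$lt_compiler_flag :; t' \
      -e 's: [^ ]*conftest\.: $lt_compiler_flag&:; t' \
      -e 's:$: $lt_compiler_flag:'
    # -> $FC -c $FCFLAGS $lt_compiler_flag conftest.f >&5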
-
-
-
-
-
-#
-# Check to make sure the static flag actually works.
-#
-wl=$lt_prog_compiler_wl_FC eval lt_tmp_static_flag=\"$lt_prog_compiler_static_FC\"
-{ $as_echo "$as_me:${as_lineno-$LINENO}: checking if $compiler static flag $lt_tmp_static_flag works" >&5
-$as_echo_n "checking if $compiler static flag $lt_tmp_static_flag works... " >&6; }
-if ${lt_cv_prog_compiler_static_works_FC+:} false; then :
-  $as_echo_n "(cached) " >&6
-else
-  lt_cv_prog_compiler_static_works_FC=no
-   save_LDFLAGS="$LDFLAGS"
-   LDFLAGS="$LDFLAGS $lt_tmp_static_flag"
-   echo "$lt_simple_link_test_code" > conftest.$ac_ext
-   if (eval $ac_link 2>conftest.err) && test -s conftest$ac_exeext; then
-     # The linker can only warn and ignore the option if not recognized
-     # So say no if there are warnings
-     if test -s conftest.err; then
-       # Append any errors to the config.log.
-       cat conftest.err 1>&5
-       $ECHO "$_lt_linker_boilerplate" | $SED '/^$/d' > conftest.exp
-       $SED '/^$/d; /^ *+/d' conftest.err >conftest.er2
-       if diff conftest.exp conftest.er2 >/dev/null; then
-         lt_cv_prog_compiler_static_works_FC=yes
-       fi
-     else
-       lt_cv_prog_compiler_static_works_FC=yes
-     fi
-   fi
-   $RM -r conftest*
-   LDFLAGS="$save_LDFLAGS"
-
-fi
-{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $lt_cv_prog_compiler_static_works_FC" >&5
-$as_echo "$lt_cv_prog_compiler_static_works_FC" >&6; }
-
-if test x"$lt_cv_prog_compiler_static_works_FC" = xyes; then
-    :
-else
-    lt_prog_compiler_static_FC=
-fi
-
-
-
-
-    { $as_echo "$as_me:${as_lineno-$LINENO}: checking if $compiler supports -c -o file.$ac_objext" >&5
-$as_echo_n "checking if $compiler supports -c -o file.$ac_objext... " >&6; }
-if ${lt_cv_prog_compiler_c_o_FC+:} false; then :
-  $as_echo_n "(cached) " >&6
-else
-  lt_cv_prog_compiler_c_o_FC=no
-   $RM -r conftest 2>/dev/null
-   mkdir conftest
-   cd conftest
-   mkdir out
-   echo "$lt_simple_compile_test_code" > conftest.$ac_ext
-
-   lt_compiler_flag="-o out/conftest2.$ac_objext"
-   # Insert the option either (1) after the last *FLAGS variable, or
-   # (2) before a word containing "conftest.", or (3) at the end.
-   # Note that $ac_compile itself does not contain backslashes and begins
-   # with a dollar sign (not a hyphen), so the echo should work correctly.
-   lt_compile=`echo "$ac_compile" | $SED \
-   -e 's:.*FLAGS}\{0,1\} :&$lt_compiler_flag :; t' \
-   -e 's: [^ ]*conftest\.: $lt_compiler_flag&:; t' \
-   -e 's:$: $lt_compiler_flag:'`
-   (eval echo "\"\$as_me:$LINENO: $lt_compile\"" >&5)
-   (eval "$lt_compile" 2>out/conftest.err)
-   ac_status=$?
-   cat out/conftest.err >&5
-   echo "$as_me:$LINENO: \$? = $ac_status" >&5
-   if (exit $ac_status) && test -s out/conftest2.$ac_objext
-   then
-     # The compiler can only warn and ignore the option if not recognized
-     # So say no if there are warnings
-     $ECHO "$_lt_compiler_boilerplate" | $SED '/^$/d' > out/conftest.exp
-     $SED '/^$/d; /^ *+/d' out/conftest.err >out/conftest.er2
-     if test ! -s out/conftest.er2 || diff out/conftest.exp out/conftest.er2 >/dev/null; then
-       lt_cv_prog_compiler_c_o_FC=yes
-     fi
-   fi
-   chmod u+w . 2>&5
-   $RM conftest*
-   # SGI C++ compiler will create directory out/ii_files/ for
-   # template instantiation
-   test -d out/ii_files && $RM out/ii_files/* && rmdir out/ii_files
-   $RM out/* && rmdir out
-   cd ..
-   $RM -r conftest
-   $RM conftest*
-
-fi
-{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $lt_cv_prog_compiler_c_o_FC" >&5
-$as_echo "$lt_cv_prog_compiler_c_o_FC" >&6; }
-
-
-
-
-
-hard_links="nottested"
-if test "$lt_cv_prog_compiler_c_o_FC" = no && test "$need_locks" != no; then
-  # do not overwrite the value of need_locks provided by the user
-  { $as_echo "$as_me:${as_lineno-$LINENO}: checking if we can lock with hard links" >&5
-$as_echo_n "checking if we can lock with hard links... " >&6; }
-  hard_links=yes
-  $RM conftest*
-  ln conftest.a conftest.b 2>/dev/null && hard_links=no
-  touch conftest.a
-  ln conftest.a conftest.b 2>&5 || hard_links=no
-  ln conftest.a conftest.b 2>/dev/null && hard_links=no
-  { $as_echo "$as_me:${as_lineno-$LINENO}: result: $hard_links" >&5
-$as_echo "$hard_links" >&6; }
-  if test "$hard_links" = no; then
-    { $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: \`$CC' does not support \`-c -o', so \`make -j' may be unsafe" >&5
-$as_echo "$as_me: WARNING: \`$CC' does not support \`-c -o', so \`make -j' may be unsafe" >&2;}
-    need_locks=warn
-  fi
-else
-  need_locks=no
-fi
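Spelled out, hard links can serve as compile locks only if a link to a missing file fails, a first link to a real file succeeds, and a second link over the existing name fails. The same probe, standalone:

    rm -f conftest.a conftest.b
    ln conftest.a conftest.b 2>/dev/null && echo "link to missing file worked: no locking"
    touch conftest.a
    ln conftest.a conftest.b 2>/dev/null || echo "plain hard link failed: no locking"
    ln conftest.a conftest.b 2>/dev/null && echo "relink over existing name worked: no locking"
    rm -f conftest.a conftest.b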
-
-
-
-    { $as_echo "$as_me:${as_lineno-$LINENO}: checking whether the $compiler linker ($LD) supports shared libraries" >&5
-$as_echo_n "checking whether the $compiler linker ($LD) supports shared libraries... " >&6; }
-
-  runpath_var=
-  allow_undefined_flag_FC=
-  always_export_symbols_FC=no
-  archive_cmds_FC=
-  archive_expsym_cmds_FC=
-  compiler_needs_object_FC=no
-  enable_shared_with_static_runtimes_FC=no
-  export_dynamic_flag_spec_FC=
-  export_symbols_cmds_FC='$NM $libobjs $convenience | $global_symbol_pipe | $SED '\''s/.* //'\'' | sort | uniq > $export_symbols'
-  hardcode_automatic_FC=no
-  hardcode_direct_FC=no
-  hardcode_direct_absolute_FC=no
-  hardcode_libdir_flag_spec_FC=
-  hardcode_libdir_separator_FC=
-  hardcode_minus_L_FC=no
-  hardcode_shlibpath_var_FC=unsupported
-  inherit_rpath_FC=no
-  link_all_deplibs_FC=unknown
-  module_cmds_FC=
-  module_expsym_cmds_FC=
-  old_archive_from_new_cmds_FC=
-  old_archive_from_expsyms_cmds_FC=
-  thread_safe_flag_spec_FC=
-  whole_archive_flag_spec_FC=
-  # include_expsyms should be a list of space-separated symbols to be *always*
-  # included in the symbol list
-  include_expsyms_FC=
-  # exclude_expsyms can be an extended regexp of symbols to exclude
-  # it will be wrapped by ` (' and `)$', so one must not match beginning or
-  # end of line.  Example: `a|bc|.*d.*' will exclude the symbols `a' and `bc',
-  # as well as any symbol that contains `d'.
-  exclude_expsyms_FC='_GLOBAL_OFFSET_TABLE_|_GLOBAL__F[ID]_.*'
-  # Although _GLOBAL_OFFSET_TABLE_ is a valid C symbol name, most a.out
-  # platforms (ab)use it in PIC code, but their linkers get confused if
-  # the symbol is explicitly referenced.  Since portable code cannot
-  # rely on this symbol name, it's probably fine to never include it in
-  # preloaded symbol tables.
-  # Exclude shared library initialization/finalization symbols.
-  extract_expsyms_cmds=
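export_symbols_cmds_FC is eval'd later to write $export_symbols, one global symbol per line, with exclude_expsyms_FC matches filtered out. A rough standalone approximation for one object file (the real command goes through $global_symbol_pipe; BSD-default nm output assumed):

    nm conftest.o | sed -n 's/^[0-9a-fA-F]* [TDBW] //p' | sort | uniq
    # -> e.g. "foo_" for a Fortran subroutine named foo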
-
-  case $host_os in
-  cygwin* | mingw* | pw32* | cegcc*)
-    # FIXME: the MSVC++ port hasn't been tested in a loooong time
-    # When not using gcc, we currently assume that we are using
-    # Microsoft Visual C++.
-    if test "$GCC" != yes; then
-      with_gnu_ld=no
-    fi
-    ;;
-  interix*)
-    # we just hope/assume this is gcc and not c89 (= MSVC++)
-    with_gnu_ld=yes
-    ;;
-  openbsd*)
-    with_gnu_ld=no
-    ;;
-  esac
-
-  ld_shlibs_FC=yes
-
-  # On some targets, GNU ld is compatible enough with the native linker
-  # that we're better off using the native interface for both.
-  lt_use_gnu_ld_interface=no
-  if test "$with_gnu_ld" = yes; then
-    case $host_os in
-      aix*)
-	# The AIX port of GNU ld has always aspired to compatibility
-	# with the native linker.  However, as the warning in the GNU ld
-	# block says, versions before 2.19.5* couldn't really create working
-	# shared libraries, regardless of the interface used.
-	case `$LD -v 2>&1` in
-	  *\ \(GNU\ Binutils\)\ 2.19.5*) ;;
-	  *\ \(GNU\ Binutils\)\ 2.[2-9]*) ;;
-	  *\ \(GNU\ Binutils\)\ [3-9]*) ;;
-	  *)
-	    lt_use_gnu_ld_interface=yes
-	    ;;
-	esac
-	;;
-      *)
-	lt_use_gnu_ld_interface=yes
-	;;
-    esac
-  fi
-
-  if test "$lt_use_gnu_ld_interface" = yes; then
-    # If archive_cmds runs LD, not CC, wlarc should be empty
-    wlarc='${wl}'
-
-    # Set some defaults for GNU ld with shared library support. These
-    # are reset later if shared libraries are not supported. Putting them
-    # here allows them to be overridden if necessary.
-    runpath_var=LD_RUN_PATH
-    hardcode_libdir_flag_spec_FC='${wl}-rpath ${wl}$libdir'
-    export_dynamic_flag_spec_FC='${wl}--export-dynamic'
-    # ancient GNU ld didn't support --whole-archive et. al.
-    if $LD --help 2>&1 | $GREP 'no-whole-archive' > /dev/null; then
-      whole_archive_flag_spec_FC="$wlarc"'--whole-archive$convenience '"$wlarc"'--no-whole-archive'
-    else
-      whole_archive_flag_spec_FC=
-    fi
-    supports_anon_versioning=no
-    case `$LD -v 2>&1` in
-      *GNU\ gold*) supports_anon_versioning=yes ;;
-      *\ [01].* | *\ 2.[0-9].* | *\ 2.10.*) ;; # catch versions < 2.11
-      *\ 2.11.93.0.2\ *) supports_anon_versioning=yes ;; # RH7.3 ...
-      *\ 2.11.92.0.12\ *) supports_anon_versioning=yes ;; # Mandrake 8.2 ...
-      *\ 2.11.*) ;; # other 2.11 versions
-      *) supports_anon_versioning=yes ;;
-    esac
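When supports_anon_versioning is yes, archive_expsym_cmds_FC (set below) wraps the export list in an anonymous version script handed to the linker via -version-script. For an export list containing just foo_, the generated $output_objdir/$libname.ver reads:

    { global:
    foo_;
    local: *; };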
-
-    # See if GNU ld supports shared libraries.
-    case $host_os in
-    aix[3-9]*)
-      # On AIX/PPC, the GNU linker is very broken
-      if test "$host_cpu" != ia64; then
-	ld_shlibs_FC=no
-	cat <<_LT_EOF 1>&2
-
-*** Warning: the GNU linker, at least up to release 2.19, is reported
-*** to be unable to reliably create shared libraries on AIX.
-*** Therefore, libtool is disabling shared libraries support.  If you
-*** really care for shared libraries, you may want to install binutils
-*** 2.20 or above, or modify your PATH so that a non-GNU linker is found.
-*** You will then need to restart the configuration process.
-
-_LT_EOF
-      fi
-      ;;
-
-    amigaos*)
-      case $host_cpu in
-      powerpc)
-            # see comment about AmigaOS4 .so support
-            archive_cmds_FC='$CC -shared $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname -o $lib'
-            archive_expsym_cmds_FC=''
-        ;;
-      m68k)
-            archive_cmds_FC='$RM $output_objdir/a2ixlibrary.data~$ECHO "#define NAME $libname" > $output_objdir/a2ixlibrary.data~$ECHO "#define LIBRARY_ID 1" >> $output_objdir/a2ixlibrary.data~$ECHO "#define VERSION $major" >> $output_objdir/a2ixlibrary.data~$ECHO "#define REVISION $revision" >> $output_objdir/a2ixlibrary.data~$AR $AR_FLAGS $lib $libobjs~$RANLIB $lib~(cd $output_objdir && a2ixlibrary -32)'
-            hardcode_libdir_flag_spec_FC='-L$libdir'
-            hardcode_minus_L_FC=yes
-        ;;
-      esac
-      ;;
-
-    beos*)
-      if $LD --help 2>&1 | $GREP ': supported targets:.* elf' > /dev/null; then
-	allow_undefined_flag_FC=unsupported
-	# Joseph Beckenbach <jrb3@best.com> says some releases of gcc
-	# support --undefined.  This deserves some investigation.  FIXME
-	archive_cmds_FC='$CC -nostart $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname -o $lib'
-      else
-	ld_shlibs_FC=no
-      fi
-      ;;
-
-    cygwin* | mingw* | pw32* | cegcc*)
-      # _LT_TAGVAR(hardcode_libdir_flag_spec, FC) is actually meaningless,
-      # as there is no search path for DLLs.
-      hardcode_libdir_flag_spec_FC='-L$libdir'
-      export_dynamic_flag_spec_FC='${wl}--export-all-symbols'
-      allow_undefined_flag_FC=unsupported
-      always_export_symbols_FC=no
-      enable_shared_with_static_runtimes_FC=yes
-      export_symbols_cmds_FC='$NM $libobjs $convenience | $global_symbol_pipe | $SED -e '\''/^[BCDGRS][ ]/s/.*[ ]\([^ ]*\)/\1 DATA/;s/^.*[ ]__nm__\([^ ]*\)[ ][^ ]*/\1 DATA/;/^I[ ]/d;/^[AITW][ ]/s/.* //'\'' | sort | uniq > $export_symbols'
-      exclude_expsyms_FC='[_]+GLOBAL_OFFSET_TABLE_|[_]+GLOBAL__[FID]_.*|[_]+head_[A-Za-z0-9_]+_dll|[A-Za-z0-9_]+_dll_iname'
-
-      if $LD --help 2>&1 | $GREP 'auto-import' > /dev/null; then
-        archive_cmds_FC='$CC -shared $libobjs $deplibs $compiler_flags -o $output_objdir/$soname ${wl}--enable-auto-image-base -Xlinker --out-implib -Xlinker $lib'
-	# If the export-symbols file already is a .def file (1st line
-	# is EXPORTS), use it as is; otherwise, prepend...
-	archive_expsym_cmds_FC='if test "x`$SED 1q $export_symbols`" = xEXPORTS; then
-	  cp $export_symbols $output_objdir/$soname.def;
-	else
-	  echo EXPORTS > $output_objdir/$soname.def;
-	  cat $export_symbols >> $output_objdir/$soname.def;
-	fi~
-	$CC -shared $output_objdir/$soname.def $libobjs $deplibs $compiler_flags -o $output_objdir/$soname ${wl}--enable-auto-image-base -Xlinker --out-implib -Xlinker $lib'
-      else
-	ld_shlibs_FC=no
-      fi
-      ;;
-
-    haiku*)
-      archive_cmds_FC='$CC -shared $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname -o $lib'
-      link_all_deplibs_FC=yes
-      ;;
-
-    interix[3-9]*)
-      hardcode_direct_FC=no
-      hardcode_shlibpath_var_FC=no
-      hardcode_libdir_flag_spec_FC='${wl}-rpath,$libdir'
-      export_dynamic_flag_spec_FC='${wl}-E'
-      # Hack: On Interix 3.x, we cannot compile PIC because of a broken gcc.
-      # Instead, shared libraries are loaded at an image base (0x10000000 by
-      # default) and relocated if they conflict, which is a slow, very
-      # memory-consuming and fragmenting process.  To avoid this, we pick a random,
-      # 256 KiB-aligned image base between 0x50000000 and 0x6FFC0000 at link
-      # time.  Moving up from 0x10000000 also allows more sbrk(2) space.
-      archive_cmds_FC='$CC -shared $pic_flag $libobjs $deplibs $compiler_flags ${wl}-h,$soname ${wl}--image-base,`expr ${RANDOM-$$} % 4096 / 2 \* 262144 + 1342177280` -o $lib'
-      archive_expsym_cmds_FC='sed "s,^,_," $export_symbols >$output_objdir/$soname.expsym~$CC -shared $pic_flag $libobjs $deplibs $compiler_flags ${wl}-h,$soname ${wl}--retain-symbols-file,$output_objdir/$soname.expsym ${wl}--image-base,`expr ${RANDOM-$$} % 4096 / 2 \* 262144 + 1342177280` -o $lib'
-      ;;
-
-    gnu* | linux* | tpf* | k*bsd*-gnu | kopensolaris*-gnu)
-      tmp_diet=no
-      if test "$host_os" = linux-dietlibc; then
-	case $cc_basename in
-	  diet\ *) tmp_diet=yes;;	# linux-dietlibc with static linking (!diet-dyn)
-	esac
-      fi
-      if $LD --help 2>&1 | $EGREP ': supported targets:.* elf' > /dev/null \
-	 && test "$tmp_diet" = no
-      then
-	tmp_addflag=' $pic_flag'
-	tmp_sharedflag='-shared'
-	case $cc_basename,$host_cpu in
-        pgcc*)				# Portland Group C compiler
-	  whole_archive_flag_spec_FC='${wl}--whole-archive`for conv in $convenience\"\"; do test  -n \"$conv\" && new_convenience=\"$new_convenience,$conv\"; done; func_echo_all \"$new_convenience\"` ${wl}--no-whole-archive'
-	  tmp_addflag=' $pic_flag'
-	  ;;
-	pgf77* | pgf90* | pgf95* | pgfortran*)
-					# Portland Group f77 and f90 compilers
-	  whole_archive_flag_spec_FC='${wl}--whole-archive`for conv in $convenience\"\"; do test  -n \"$conv\" && new_convenience=\"$new_convenience,$conv\"; done; func_echo_all \"$new_convenience\"` ${wl}--no-whole-archive'
-	  tmp_addflag=' $pic_flag -Mnomain' ;;
-	ecc*,ia64* | icc*,ia64*)	# Intel C compiler on ia64
-	  tmp_addflag=' -i_dynamic' ;;
-	efc*,ia64* | ifort*,ia64*)	# Intel Fortran compiler on ia64
-	  tmp_addflag=' -i_dynamic -nofor_main' ;;
-	ifc* | ifort*)			# Intel Fortran compiler
-	  tmp_addflag=' -nofor_main' ;;
-	lf95*)				# Lahey Fortran 8.1
-	  whole_archive_flag_spec_FC=
-	  tmp_sharedflag='--shared' ;;
-	xl[cC]* | bgxl[cC]* | mpixl[cC]*) # IBM XL C 8.0 on PPC (deal with xlf below)
-	  tmp_sharedflag='-qmkshrobj'
-	  tmp_addflag= ;;
-	nvcc*)	# Cuda Compiler Driver 2.2
-	  whole_archive_flag_spec_FC='${wl}--whole-archive`for conv in $convenience\"\"; do test  -n \"$conv\" && new_convenience=\"$new_convenience,$conv\"; done; func_echo_all \"$new_convenience\"` ${wl}--no-whole-archive'
-	  compiler_needs_object_FC=yes
-	  ;;
-	esac
-	case `$CC -V 2>&1 | sed 5q` in
-	*Sun\ C*)			# Sun C 5.9
-	  whole_archive_flag_spec_FC='${wl}--whole-archive`new_convenience=; for conv in $convenience\"\"; do test -z \"$conv\" || new_convenience=\"$new_convenience,$conv\"; done; func_echo_all \"$new_convenience\"` ${wl}--no-whole-archive'
-	  compiler_needs_object_FC=yes
-	  tmp_sharedflag='-G' ;;
-	*Sun\ F*)			# Sun Fortran 8.3
-	  tmp_sharedflag='-G' ;;
-	esac
-	archive_cmds_FC='$CC '"$tmp_sharedflag""$tmp_addflag"' $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname -o $lib'
-
-        if test "x$supports_anon_versioning" = xyes; then
-          archive_expsym_cmds_FC='echo "{ global:" > $output_objdir/$libname.ver~
-	    cat $export_symbols | sed -e "s/\(.*\)/\1;/" >> $output_objdir/$libname.ver~
-	    echo "local: *; };" >> $output_objdir/$libname.ver~
-	    $CC '"$tmp_sharedflag""$tmp_addflag"' $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname ${wl}-version-script ${wl}$output_objdir/$libname.ver -o $lib'
-        fi
-
-	case $cc_basename in
-	xlf* | bgf* | bgxlf* | mpixlf*)
-	  # IBM XL Fortran 10.1 on PPC cannot create shared libs itself
-	  whole_archive_flag_spec_FC='--whole-archive$convenience --no-whole-archive'
-	  hardcode_libdir_flag_spec_FC='${wl}-rpath ${wl}$libdir'
-	  archive_cmds_FC='$LD -shared $libobjs $deplibs $linker_flags -soname $soname -o $lib'
-	  if test "x$supports_anon_versioning" = xyes; then
-	    archive_expsym_cmds_FC='echo "{ global:" > $output_objdir/$libname.ver~
-	      cat $export_symbols | sed -e "s/\(.*\)/\1;/" >> $output_objdir/$libname.ver~
-	      echo "local: *; };" >> $output_objdir/$libname.ver~
-	      $LD -shared $libobjs $deplibs $linker_flags -soname $soname -version-script $output_objdir/$libname.ver -o $lib'
-	  fi
-	  ;;
-	esac
-      else
-        ld_shlibs_FC=no
-      fi
-      ;;
-
-    netbsd*)
-      if echo __ELF__ | $CC -E - | $GREP __ELF__ >/dev/null; then
-	archive_cmds_FC='$LD -Bshareable $libobjs $deplibs $linker_flags -o $lib'
-	wlarc=
-      else
-	archive_cmds_FC='$CC -shared $pic_flag $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname -o $lib'
-	archive_expsym_cmds_FC='$CC -shared $pic_flag $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname ${wl}-retain-symbols-file $wl$export_symbols -o $lib'
-      fi
-      ;;
-
-    solaris*)
-      if $LD -v 2>&1 | $GREP 'BFD 2\.8' > /dev/null; then
-	ld_shlibs_FC=no
-	cat <<_LT_EOF 1>&2
-
-*** Warning: The releases 2.8.* of the GNU linker cannot reliably
-*** create shared libraries on Solaris systems.  Therefore, libtool
-*** is disabling shared libraries support.  We urge you to upgrade GNU
-*** binutils to release 2.9.1 or newer.  Another option is to modify
-*** your PATH or compiler configuration so that the native linker is
-*** used, and then restart.
-
-_LT_EOF
-      elif $LD --help 2>&1 | $GREP ': supported targets:.* elf' > /dev/null; then
-	archive_cmds_FC='$CC -shared $pic_flag $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname -o $lib'
-	archive_expsym_cmds_FC='$CC -shared $pic_flag $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname ${wl}-retain-symbols-file $wl$export_symbols -o $lib'
-      else
-	ld_shlibs_FC=no
-      fi
-      ;;
-
-    sysv5* | sco3.2v5* | sco5v6* | unixware* | OpenUNIX*)
-      case `$LD -v 2>&1` in
-        *\ [01].* | *\ 2.[0-9].* | *\ 2.1[0-5].*)
-	ld_shlibs_FC=no
-	cat <<_LT_EOF 1>&2
-
-*** Warning: Releases of the GNU linker prior to 2.16.91.0.3 cannot
-*** reliably create shared libraries on SCO systems.  Therefore, libtool
-*** is disabling shared libraries support.  We urge you to upgrade GNU
-*** binutils to release 2.16.91.0.3 or newer.  Another option is to modify
-*** your PATH or compiler configuration so that the native linker is
-*** used, and then restart.
-
-_LT_EOF
-	;;
-	*)
-	  # For security reasons, it is highly recommended that you always
-	  # use absolute paths for naming shared libraries, and exclude the
-	  # DT_RUNPATH tag from executables and libraries.  But doing so
-	  # requires that you compile everything twice, which is a pain.
-	  if $LD --help 2>&1 | $GREP ': supported targets:.* elf' > /dev/null; then
-	    hardcode_libdir_flag_spec_FC='${wl}-rpath ${wl}$libdir'
-	    archive_cmds_FC='$CC -shared $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname -o $lib'
-	    archive_expsym_cmds_FC='$CC -shared $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname ${wl}-retain-symbols-file $wl$export_symbols -o $lib'
-	  else
-	    ld_shlibs_FC=no
-	  fi
-	;;
-      esac
-      ;;
-
-    sunos4*)
-      archive_cmds_FC='$LD -assert pure-text -Bshareable -o $lib $libobjs $deplibs $linker_flags'
-      wlarc=
-      hardcode_direct_FC=yes
-      hardcode_shlibpath_var_FC=no
-      ;;
-
-    *)
-      if $LD --help 2>&1 | $GREP ': supported targets:.* elf' > /dev/null; then
-	archive_cmds_FC='$CC -shared $pic_flag $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname -o $lib'
-	archive_expsym_cmds_FC='$CC -shared $pic_flag $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname ${wl}-retain-symbols-file $wl$export_symbols -o $lib'
-      else
-	ld_shlibs_FC=no
-      fi
-      ;;
-    esac
-
-    if test "$ld_shlibs_FC" = no; then
-      runpath_var=
-      hardcode_libdir_flag_spec_FC=
-      export_dynamic_flag_spec_FC=
-      whole_archive_flag_spec_FC=
-    fi
-  else
-    # PORTME fill in a description of your system's linker (not GNU ld)
-    case $host_os in
-    aix3*)
-      allow_undefined_flag_FC=unsupported
-      always_export_symbols_FC=yes
-      archive_expsym_cmds_FC='$LD -o $output_objdir/$soname $libobjs $deplibs $linker_flags -bE:$export_symbols -T512 -H512 -bM:SRE~$AR $AR_FLAGS $lib $output_objdir/$soname'
-      # Note: this linker hardcodes the directories in LIBPATH if there
-      # are no directories specified by -L.
-      hardcode_minus_L_FC=yes
-      if test "$GCC" = yes && test -z "$lt_prog_compiler_static"; then
-	# Neither direct hardcoding nor static linking is supported with a
-	# broken collect2.
-	hardcode_direct_FC=unsupported
-      fi
-      ;;
-
-    aix[4-9]*)
-      if test "$host_cpu" = ia64; then
-	# On IA64, the linker does run time linking by default, so we don't
-	# have to do anything special.
-	aix_use_runtimelinking=no
-	exp_sym_flag='-Bexport'
-	no_entry_flag=""
-      else
-	# If we're using GNU nm, then we don't want the "-C" option.
-	# To AIX nm, -C means demangle; to GNU nm, it means don't demangle.
-	# Also, AIX nm treats weak defined symbols like other global
-	# defined symbols, whereas GNU nm marks them as "W".
-	if $NM -V 2>&1 | $GREP 'GNU' > /dev/null; then
-	  export_symbols_cmds_FC='$NM -Bpg $libobjs $convenience | awk '\''{ if (((\$ 2 == "T") || (\$ 2 == "D") || (\$ 2 == "B") || (\$ 2 == "W")) && (substr(\$ 3,1,1) != ".")) { print \$ 3 } }'\'' | sort -u > $export_symbols'
-	else
-	  export_symbols_cmds_FC='$NM -BCpg $libobjs $convenience | awk '\''{ if (((\$ 2 == "T") || (\$ 2 == "D") || (\$ 2 == "B")) && (substr(\$ 3,1,1) != ".")) { print \$ 3 } }'\'' | sort -u > $export_symbols'
-	fi
-	aix_use_runtimelinking=no
-
-	# Test if we are trying to use run time linking or normal
-	# AIX style linking. If -brtl is somewhere in LDFLAGS, we
-	# need to do runtime linking.
-	case $host_os in aix4.[23]|aix4.[23].*|aix[5-9]*)
-	  for ld_flag in $LDFLAGS; do
-	  if (test $ld_flag = "-brtl" || test $ld_flag = "-Wl,-brtl"); then
-	    aix_use_runtimelinking=yes
-	    break
-	  fi
-	  done
-	  ;;
-	esac
-
-	exp_sym_flag='-bexport'
-	no_entry_flag='-bnoentry'
-      fi
-
-      # When large executables or shared objects are built, AIX ld can
-      # have problems creating the table of contents.  If linking a library
-      # or program results in "error TOC overflow" add -mminimal-toc to
-      # CXXFLAGS/CFLAGS for g++/gcc.  In the cases where that is not
-      # enough to fix the problem, add -Wl,-bbigtoc to LDFLAGS.
-
-      archive_cmds_FC=''
-      hardcode_direct_FC=yes
-      hardcode_direct_absolute_FC=yes
-      hardcode_libdir_separator_FC=':'
-      link_all_deplibs_FC=yes
-      file_list_spec_FC='${wl}-f,'
-
-      if test "$GCC" = yes; then
-	case $host_os in aix4.[012]|aix4.[012].*)
-	# We only want to do this on AIX 4.2 and lower; the check
-	# below for broken collect2 doesn't work under 4.3+.
-	  collect2name=`${CC} -print-prog-name=collect2`
-	  if test -f "$collect2name" &&
-	   strings "$collect2name" | $GREP resolve_lib_name >/dev/null
-	  then
-	  # We have reworked collect2
-	  :
-	  else
-	  # We have old collect2
-	  hardcode_direct_FC=unsupported
-	  # It fails to find uninstalled libraries when the uninstalled
-	  # path is not listed in the libpath.  Setting hardcode_minus_L
-	  # to unsupported forces relinking
-	  hardcode_minus_L_FC=yes
-	  hardcode_libdir_flag_spec_FC='-L$libdir'
-	  hardcode_libdir_separator_FC=
-	  fi
-	  ;;
-	esac
-	shared_flag='-shared'
-	if test "$aix_use_runtimelinking" = yes; then
-	  shared_flag="$shared_flag "'${wl}-G'
-	fi
-      else
-	# not using gcc
-	if test "$host_cpu" = ia64; then
-	# VisualAge C++, Version 5.5 for AIX 5L for IA-64, Beta 3 Release
-	# chokes on -Wl,-G. The following line is correct:
-	  shared_flag='-G'
-	else
-	  if test "$aix_use_runtimelinking" = yes; then
-	    shared_flag='${wl}-G'
-	  else
-	    shared_flag='${wl}-bM:SRE'
-	  fi
-	fi
-      fi
-
-      export_dynamic_flag_spec_FC='${wl}-bexpall'
-      # It seems that -bexpall does not export symbols beginning with
-      # underscore (_), so it is better to generate a list of symbols to export.
-      always_export_symbols_FC=yes
-      if test "$aix_use_runtimelinking" = yes; then
-	# Warning - without using the other runtime loading flags (-brtl),
-	# -berok will link without error, but may produce a broken library.
-	allow_undefined_flag_FC='-berok'
-        # Determine the default libpath from the value encoded in an
-        # empty executable.
-        if test "${lt_cv_aix_libpath+set}" = set; then
-  aix_libpath=$lt_cv_aix_libpath
-else
-  if ${lt_cv_aix_libpath__FC+:} false; then :
-  $as_echo_n "(cached) " >&6
-else
-  cat > conftest.$ac_ext <<_ACEOF
-      program main
-
-      end
-_ACEOF
-if ac_fn_fc_try_link "$LINENO"; then :
-
-  lt_aix_libpath_sed='
-      /Import File Strings/,/^$/ {
-	  /^0/ {
-	      s/^0  *\([^ ]*\) *$/\1/
-	      p
-	  }
-      }'
-  lt_cv_aix_libpath__FC=`dump -H conftest$ac_exeext 2>/dev/null | $SED -n -e "$lt_aix_libpath_sed"`
-  # Check for a 64-bit object if we didn't find anything.
-  if test -z "$lt_cv_aix_libpath__FC"; then
-    lt_cv_aix_libpath__FC=`dump -HX64 conftest$ac_exeext 2>/dev/null | $SED -n -e "$lt_aix_libpath_sed"`
-  fi
-fi
-rm -f core conftest.err conftest.$ac_objext \
-    conftest$ac_exeext conftest.$ac_ext
-  if test -z "$lt_cv_aix_libpath__FC"; then
-    lt_cv_aix_libpath__FC="/usr/lib:/lib"
-  fi
-
-fi
-
-  aix_libpath=$lt_cv_aix_libpath__FC
-fi
-
-        hardcode_libdir_flag_spec_FC='${wl}-blibpath:$libdir:'"$aix_libpath"
-        archive_expsym_cmds_FC='$CC -o $output_objdir/$soname $libobjs $deplibs '"\${wl}$no_entry_flag"' $compiler_flags `if test "x${allow_undefined_flag}" != "x"; then func_echo_all "${wl}${allow_undefined_flag}"; else :; fi` '"\${wl}$exp_sym_flag:\$export_symbols $shared_flag"
-      else
-	if test "$host_cpu" = ia64; then
-	  hardcode_libdir_flag_spec_FC='${wl}-R $libdir:/usr/lib:/lib'
-	  allow_undefined_flag_FC="-z nodefs"
-	  archive_expsym_cmds_FC="\$CC $shared_flag"' -o $output_objdir/$soname $libobjs $deplibs '"\${wl}$no_entry_flag"' $compiler_flags ${wl}${allow_undefined_flag} '"\${wl}$exp_sym_flag:\$export_symbols"
-	else
-	 # Determine the default libpath from the value encoded in an
-	 # empty executable.
-	 if test "${lt_cv_aix_libpath+set}" = set; then
-  aix_libpath=$lt_cv_aix_libpath
-else
-  if ${lt_cv_aix_libpath__FC+:} false; then :
-  $as_echo_n "(cached) " >&6
-else
-  cat > conftest.$ac_ext <<_ACEOF
-      program main
-
-      end
-_ACEOF
-if ac_fn_fc_try_link "$LINENO"; then :
-
-  lt_aix_libpath_sed='
-      /Import File Strings/,/^$/ {
-	  /^0/ {
-	      s/^0  *\([^ ]*\) *$/\1/
-	      p
-	  }
-      }'
-  lt_cv_aix_libpath__FC=`dump -H conftest$ac_exeext 2>/dev/null | $SED -n -e "$lt_aix_libpath_sed"`
-  # Check for a 64-bit object if we didn't find anything.
-  if test -z "$lt_cv_aix_libpath__FC"; then
-    lt_cv_aix_libpath__FC=`dump -HX64 conftest$ac_exeext 2>/dev/null | $SED -n -e "$lt_aix_libpath_sed"`
-  fi
-fi
-rm -f core conftest.err conftest.$ac_objext \
-    conftest$ac_exeext conftest.$ac_ext
-  if test -z "$lt_cv_aix_libpath__FC"; then
-    lt_cv_aix_libpath__FC="/usr/lib:/lib"
-  fi
-
-fi
-
-  aix_libpath=$lt_cv_aix_libpath__FC
-fi
-
-	 hardcode_libdir_flag_spec_FC='${wl}-blibpath:$libdir:'"$aix_libpath"
-	  # Warning - without using the other run time loading flags,
-	  # -berok will link without error, but may produce a broken library.
-	  no_undefined_flag_FC=' ${wl}-bernotok'
-	  allow_undefined_flag_FC=' ${wl}-berok'
-	  if test "$with_gnu_ld" = yes; then
-	    # We only use this code for GNU lds that support --whole-archive.
-	    whole_archive_flag_spec_FC='${wl}--whole-archive$convenience ${wl}--no-whole-archive'
-	  else
-	    # Exported symbols can be pulled into shared objects from archives
-	    whole_archive_flag_spec_FC='$convenience'
-	  fi
-	  archive_cmds_need_lc_FC=yes
-	  # This is similar to how AIX traditionally builds its shared libraries.
-	  archive_expsym_cmds_FC="\$CC $shared_flag"' -o $output_objdir/$soname $libobjs $deplibs ${wl}-bnoentry $compiler_flags ${wl}-bE:$export_symbols${allow_undefined_flag}~$AR $AR_FLAGS $output_objdir/$libname$release.a $output_objdir/$soname'
-	fi
-      fi
-      ;;
-
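
The aix[4-9] branch above discovers the default libpath by linking an empty Fortran program and scraping the first Import File Strings entry out of its loader section with `dump -H`. A condensed sketch of the same trick (assuming an AIX host; `empty.f` is a hypothetical scratch file):

    # Link an empty program and read the loader header for the built-in
    # library search path; fall back to the historical default if empty.
    printf '      program main\n      end\n' > empty.f
    ${FC:-xlf} empty.f -o empty
    aix_libpath=`dump -H empty 2>/dev/null | sed -n '
      /Import File Strings/,/^$/ {
        /^0/ s/^0  *\([^ ]*\) *$/\1/p
      }'`
    test -z "$aix_libpath" && aix_libpath=/usr/lib:/lib
    echo "default libpath: $aix_libpath"
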
-    amigaos*)
-      case $host_cpu in
-      powerpc)
-            # see comment about AmigaOS4 .so support
-            archive_cmds_FC='$CC -shared $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname -o $lib'
-            archive_expsym_cmds_FC=''
-        ;;
-      m68k)
-            archive_cmds_FC='$RM $output_objdir/a2ixlibrary.data~$ECHO "#define NAME $libname" > $output_objdir/a2ixlibrary.data~$ECHO "#define LIBRARY_ID 1" >> $output_objdir/a2ixlibrary.data~$ECHO "#define VERSION $major" >> $output_objdir/a2ixlibrary.data~$ECHO "#define REVISION $revision" >> $output_objdir/a2ixlibrary.data~$AR $AR_FLAGS $lib $libobjs~$RANLIB $lib~(cd $output_objdir && a2ixlibrary -32)'
-            hardcode_libdir_flag_spec_FC='-L$libdir'
-            hardcode_minus_L_FC=yes
-        ;;
-      esac
-      ;;
-
-    bsdi[45]*)
-      export_dynamic_flag_spec_FC=-rdynamic
-      ;;
-
-    cygwin* | mingw* | pw32* | cegcc*)
-      # When not using gcc, we currently assume that we are using
-      # Microsoft Visual C++.
-      # hardcode_libdir_flag_spec is actually meaningless, as there is
-      # no search path for DLLs.
-      case $cc_basename in
-      cl*)
-	# Native MSVC
-	hardcode_libdir_flag_spec_FC=' '
-	allow_undefined_flag_FC=unsupported
-	always_export_symbols_FC=yes
-	file_list_spec_FC='@'
-	# Tell ltmain to make .lib files, not .a files.
-	libext=lib
-	# Tell ltmain to make .dll files, not .so files.
-	shrext_cmds=".dll"
-	# FIXME: Setting linknames here is a bad hack.
-	archive_cmds_FC='$CC -o $output_objdir/$soname $libobjs $compiler_flags $deplibs -Wl,-dll~linknames='
-	archive_expsym_cmds_FC='if test "x`$SED 1q $export_symbols`" = xEXPORTS; then
-	    sed -n -e 's/\\\\\\\(.*\\\\\\\)/-link\\\ -EXPORT:\\\\\\\1/' -e '1\\\!p' < $export_symbols > $output_objdir/$soname.exp;
-	  else
-	    sed -e 's/\\\\\\\(.*\\\\\\\)/-link\\\ -EXPORT:\\\\\\\1/' < $export_symbols > $output_objdir/$soname.exp;
-	  fi~
-	  $CC -o $tool_output_objdir$soname $libobjs $compiler_flags $deplibs "@$tool_output_objdir$soname.exp" -Wl,-DLL,-IMPLIB:"$tool_output_objdir$libname.dll.lib"~
-	  linknames='
-	# The linker will not automatically build a static lib if we build a DLL.
-	# _LT_TAGVAR(old_archive_from_new_cmds, FC)='true'
-	enable_shared_with_static_runtimes_FC=yes
-	exclude_expsyms_FC='_NULL_IMPORT_DESCRIPTOR|_IMPORT_DESCRIPTOR_.*'
-	export_symbols_cmds_FC='$NM $libobjs $convenience | $global_symbol_pipe | $SED -e '\''/^[BCDGRS][ ]/s/.*[ ]\([^ ]*\)/\1,DATA/'\'' | $SED -e '\''/^[AITW][ ]/s/.*[ ]//'\'' | sort | uniq > $export_symbols'
-	# Don't use ranlib
-	old_postinstall_cmds_FC='chmod 644 $oldlib'
-	postlink_cmds_FC='lt_outputfile="@OUTPUT@"~
-	  lt_tool_outputfile="@TOOL_OUTPUT@"~
-	  case $lt_outputfile in
-	    *.exe|*.EXE) ;;
-	    *)
-	      lt_outputfile="$lt_outputfile.exe"
-	      lt_tool_outputfile="$lt_tool_outputfile.exe"
-	      ;;
-	  esac~
-	  if test "$MANIFEST_TOOL" != ":" && test -f "$lt_outputfile.manifest"; then
-	    $MANIFEST_TOOL -manifest "$lt_tool_outputfile.manifest" -outputresource:"$lt_tool_outputfile" || exit 1;
-	    $RM "$lt_outputfile.manifest";
-	  fi'
-	;;
-      *)
-	# Assume MSVC wrapper
-	hardcode_libdir_flag_spec_FC=' '
-	allow_undefined_flag_FC=unsupported
-	# Tell ltmain to make .lib files, not .a files.
-	libext=lib
-	# Tell ltmain to make .dll files, not .so files.
-	shrext_cmds=".dll"
-	# FIXME: Setting linknames here is a bad hack.
-	archive_cmds_FC='$CC -o $lib $libobjs $compiler_flags `func_echo_all "$deplibs" | $SED '\''s/ -lc$//'\''` -link -dll~linknames='
-	# The linker will automatically build a .lib file if we build a DLL.
-	old_archive_from_new_cmds_FC='true'
-	# FIXME: Should let the user specify the lib program.
-	old_archive_cmds_FC='lib -OUT:$oldlib$oldobjs$old_deplibs'
-	enable_shared_with_static_runtimes_FC=yes
-	;;
-      esac
-      ;;
-
-    darwin* | rhapsody*)
-
-
-  archive_cmds_need_lc_FC=no
-  hardcode_direct_FC=no
-  hardcode_automatic_FC=yes
-  hardcode_shlibpath_var_FC=unsupported
-  if test "$lt_cv_ld_force_load" = "yes"; then
-    whole_archive_flag_spec_FC='`for conv in $convenience\"\"; do test  -n \"$conv\" && new_convenience=\"$new_convenience ${wl}-force_load,$conv\"; done; func_echo_all \"$new_convenience\"`'
-    compiler_needs_object_FC=yes
-  else
-    whole_archive_flag_spec_FC=''
-  fi
-  link_all_deplibs_FC=yes
-  allow_undefined_flag_FC="$_lt_dar_allow_undefined"
-  case $cc_basename in
-     ifort*) _lt_dar_can_shared=yes ;;
-     *) _lt_dar_can_shared=$GCC ;;
-  esac
-  if test "$_lt_dar_can_shared" = "yes"; then
-    output_verbose_link_cmd=func_echo_all
-    archive_cmds_FC="\$CC -dynamiclib \$allow_undefined_flag -o \$lib \$libobjs \$deplibs \$compiler_flags -install_name \$rpath/\$soname \$verstring $_lt_dar_single_mod${_lt_dsymutil}"
-    module_cmds_FC="\$CC \$allow_undefined_flag -o \$lib -bundle \$libobjs \$deplibs \$compiler_flags${_lt_dsymutil}"
-    archive_expsym_cmds_FC="sed 's,^,_,' < \$export_symbols > \$output_objdir/\${libname}-symbols.expsym~\$CC -dynamiclib \$allow_undefined_flag -o \$lib \$libobjs \$deplibs \$compiler_flags -install_name \$rpath/\$soname \$verstring ${_lt_dar_single_mod}${_lt_dar_export_syms}${_lt_dsymutil}"
-    module_expsym_cmds_FC="sed -e 's,^,_,' < \$export_symbols > \$output_objdir/\${libname}-symbols.expsym~\$CC \$allow_undefined_flag -o \$lib -bundle \$libobjs \$deplibs \$compiler_flags${_lt_dar_export_syms}${_lt_dsymutil}"
-
-  else
-  ld_shlibs_FC=no
-  fi
-
-      ;;
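
On Darwin the archive command above embeds an install name rather than an ELF-style soname. A rough equivalent by hand (library and object names are hypothetical; `otool` is shown only to verify the result):

    # -install_name is the Mach-O analogue of -Wl,-soname on ELF systems.
    cc -dynamiclib foo.o -o libfoo.1.dylib \
       -install_name /usr/local/lib/libfoo.1.dylib \
       -compatibility_version 1.0 -current_version 1.2.3
    otool -D libfoo.1.dylib   # prints the embedded install name
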
-
-    dgux*)
-      archive_cmds_FC='$LD -G -h $soname -o $lib $libobjs $deplibs $linker_flags'
-      hardcode_libdir_flag_spec_FC='-L$libdir'
-      hardcode_shlibpath_var_FC=no
-      ;;
-
-    # FreeBSD 2.2.[012] allows us to include c++rt0.o to get C++ constructor
-    # support.  Future versions do this automatically, but an explicit c++rt0.o
-    # does not break anything, and helps significantly (at the cost of a little
-    # extra space).
-    freebsd2.2*)
-      archive_cmds_FC='$LD -Bshareable -o $lib $libobjs $deplibs $linker_flags /usr/lib/c++rt0.o'
-      hardcode_libdir_flag_spec_FC='-R$libdir'
-      hardcode_direct_FC=yes
-      hardcode_shlibpath_var_FC=no
-      ;;
-
-    # Unfortunately, older versions of FreeBSD 2 do not have this feature.
-    freebsd2.*)
-      archive_cmds_FC='$LD -Bshareable -o $lib $libobjs $deplibs $linker_flags'
-      hardcode_direct_FC=yes
-      hardcode_minus_L_FC=yes
-      hardcode_shlibpath_var_FC=no
-      ;;
-
-    # FreeBSD 3 and greater uses gcc -shared to do shared libraries.
-    freebsd* | dragonfly*)
-      archive_cmds_FC='$CC -shared $pic_flag -o $lib $libobjs $deplibs $compiler_flags'
-      hardcode_libdir_flag_spec_FC='-R$libdir'
-      hardcode_direct_FC=yes
-      hardcode_shlibpath_var_FC=no
-      ;;
-
-    hpux9*)
-      if test "$GCC" = yes; then
-	archive_cmds_FC='$RM $output_objdir/$soname~$CC -shared $pic_flag ${wl}+b ${wl}$install_libdir -o $output_objdir/$soname $libobjs $deplibs $compiler_flags~test $output_objdir/$soname = $lib || mv $output_objdir/$soname $lib'
-      else
-	archive_cmds_FC='$RM $output_objdir/$soname~$LD -b +b $install_libdir -o $output_objdir/$soname $libobjs $deplibs $linker_flags~test $output_objdir/$soname = $lib || mv $output_objdir/$soname $lib'
-      fi
-      hardcode_libdir_flag_spec_FC='${wl}+b ${wl}$libdir'
-      hardcode_libdir_separator_FC=:
-      hardcode_direct_FC=yes
-
-      # hardcode_minus_L: Not really in the search PATH,
-      # but as the default location of the library.
-      hardcode_minus_L_FC=yes
-      export_dynamic_flag_spec_FC='${wl}-E'
-      ;;
-
-    hpux10*)
-      if test "$GCC" = yes && test "$with_gnu_ld" = no; then
-	archive_cmds_FC='$CC -shared $pic_flag ${wl}+h ${wl}$soname ${wl}+b ${wl}$install_libdir -o $lib $libobjs $deplibs $compiler_flags'
-      else
-	archive_cmds_FC='$LD -b +h $soname +b $install_libdir -o $lib $libobjs $deplibs $linker_flags'
-      fi
-      if test "$with_gnu_ld" = no; then
-	hardcode_libdir_flag_spec_FC='${wl}+b ${wl}$libdir'
-	hardcode_libdir_separator_FC=:
-	hardcode_direct_FC=yes
-	hardcode_direct_absolute_FC=yes
-	export_dynamic_flag_spec_FC='${wl}-E'
-	# hardcode_minus_L: Not really in the search PATH,
-	# but as the default location of the library.
-	hardcode_minus_L_FC=yes
-      fi
-      ;;
-
-    hpux11*)
-      if test "$GCC" = yes && test "$with_gnu_ld" = no; then
-	case $host_cpu in
-	hppa*64*)
-	  archive_cmds_FC='$CC -shared ${wl}+h ${wl}$soname -o $lib $libobjs $deplibs $compiler_flags'
-	  ;;
-	ia64*)
-	  archive_cmds_FC='$CC -shared $pic_flag ${wl}+h ${wl}$soname ${wl}+nodefaultrpath -o $lib $libobjs $deplibs $compiler_flags'
-	  ;;
-	*)
-	  archive_cmds_FC='$CC -shared $pic_flag ${wl}+h ${wl}$soname ${wl}+b ${wl}$install_libdir -o $lib $libobjs $deplibs $compiler_flags'
-	  ;;
-	esac
-      else
-	case $host_cpu in
-	hppa*64*)
-	  archive_cmds_FC='$CC -b ${wl}+h ${wl}$soname -o $lib $libobjs $deplibs $compiler_flags'
-	  ;;
-	ia64*)
-	  archive_cmds_FC='$CC -b ${wl}+h ${wl}$soname ${wl}+nodefaultrpath -o $lib $libobjs $deplibs $compiler_flags'
-	  ;;
-	*)
-	archive_cmds_FC='$CC -b ${wl}+h ${wl}$soname ${wl}+b ${wl}$install_libdir -o $lib $libobjs $deplibs $compiler_flags'
-	  ;;
-	esac
-      fi
-      if test "$with_gnu_ld" = no; then
-	hardcode_libdir_flag_spec_FC='${wl}+b ${wl}$libdir'
-	hardcode_libdir_separator_FC=:
-
-	case $host_cpu in
-	hppa*64*|ia64*)
-	  hardcode_direct_FC=no
-	  hardcode_shlibpath_var_FC=no
-	  ;;
-	*)
-	  hardcode_direct_FC=yes
-	  hardcode_direct_absolute_FC=yes
-	  export_dynamic_flag_spec_FC='${wl}-E'
-
-	  # hardcode_minus_L: Not really in the search PATH,
-	  # but as the default location of the library.
-	  hardcode_minus_L_FC=yes
-	  ;;
-	esac
-      fi
-      ;;
-
-    irix5* | irix6* | nonstopux*)
-      if test "$GCC" = yes; then
-	archive_cmds_FC='$CC -shared $pic_flag $libobjs $deplibs $compiler_flags ${wl}-soname ${wl}$soname `test -n "$verstring" && func_echo_all "${wl}-set_version ${wl}$verstring"` ${wl}-update_registry ${wl}${output_objdir}/so_locations -o $lib'
-	# Try to use the -exported_symbol ld option; if it does not
-	# work, assume that -exports_file does not work either and
-	# implicitly export all symbols.
-	# This should be the same for all languages, so no per-tag cache variable.
-	{ $as_echo "$as_me:${as_lineno-$LINENO}: checking whether the $host_os linker accepts -exported_symbol" >&5
-$as_echo_n "checking whether the $host_os linker accepts -exported_symbol... " >&6; }
-if ${lt_cv_irix_exported_symbol+:} false; then :
-  $as_echo_n "(cached) " >&6
-else
-  save_LDFLAGS="$LDFLAGS"
-	   LDFLAGS="$LDFLAGS -shared ${wl}-exported_symbol ${wl}foo ${wl}-update_registry ${wl}/dev/null"
-	   cat > conftest.$ac_ext <<_ACEOF
-
-      subroutine foo
-      end
-_ACEOF
-if ac_fn_fc_try_link "$LINENO"; then :
-  lt_cv_irix_exported_symbol=yes
-else
-  lt_cv_irix_exported_symbol=no
-fi
-rm -f core conftest.err conftest.$ac_objext \
-    conftest$ac_exeext conftest.$ac_ext
-           LDFLAGS="$save_LDFLAGS"
-fi
-{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $lt_cv_irix_exported_symbol" >&5
-$as_echo "$lt_cv_irix_exported_symbol" >&6; }
-	if test "$lt_cv_irix_exported_symbol" = yes; then
-          archive_expsym_cmds_FC='$CC -shared $pic_flag $libobjs $deplibs $compiler_flags ${wl}-soname ${wl}$soname `test -n "$verstring" && func_echo_all "${wl}-set_version ${wl}$verstring"` ${wl}-update_registry ${wl}${output_objdir}/so_locations ${wl}-exports_file ${wl}$export_symbols -o $lib'
-	fi
-      else
-	archive_cmds_FC='$CC -shared $libobjs $deplibs $compiler_flags -soname $soname `test -n "$verstring" && func_echo_all "-set_version $verstring"` -update_registry ${output_objdir}/so_locations -o $lib'
-	archive_expsym_cmds_FC='$CC -shared $libobjs $deplibs $compiler_flags -soname $soname `test -n "$verstring" && func_echo_all "-set_version $verstring"` -update_registry ${output_objdir}/so_locations -exports_file $export_symbols -o $lib'
-      fi
-      archive_cmds_need_lc_FC='no'
-      hardcode_libdir_flag_spec_FC='${wl}-rpath ${wl}$libdir'
-      hardcode_libdir_separator_FC=:
-      inherit_rpath_FC=yes
-      link_all_deplibs_FC=yes
-      ;;
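
The IRIX check above decides whether `-exported_symbol` is usable by compiling a one-line subroutine with the flag smuggled in through LDFLAGS. Stripped of the autoconf caching, the probe amounts to this sketch (assuming GCC-style `-Wl,` forwarding; names are hypothetical):

    # If the linker accepts -exported_symbol, -exports_file should work too;
    # otherwise fall back to implicitly exporting every symbol.
    printf '      subroutine foo\n      end\n' > conftest.f
    if ${FC:-gfortran} -shared conftest.f -o conftest.so \
         -Wl,-exported_symbol -Wl,foo \
         -Wl,-update_registry -Wl,/dev/null 2>/dev/null; then
      irix_exported_symbol=yes
    else
      irix_exported_symbol=no
    fi
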
-
-    netbsd*)
-      if echo __ELF__ | $CC -E - | $GREP __ELF__ >/dev/null; then
-	archive_cmds_FC='$LD -Bshareable -o $lib $libobjs $deplibs $linker_flags'  # a.out
-      else
-	archive_cmds_FC='$LD -shared -o $lib $libobjs $deplibs $linker_flags'      # ELF
-      fi
-      hardcode_libdir_flag_spec_FC='-R$libdir'
-      hardcode_direct_FC=yes
-      hardcode_shlibpath_var_FC=no
-      ;;
-
-    newsos6)
-      archive_cmds_FC='$LD -G -h $soname -o $lib $libobjs $deplibs $linker_flags'
-      hardcode_direct_FC=yes
-      hardcode_libdir_flag_spec_FC='${wl}-rpath ${wl}$libdir'
-      hardcode_libdir_separator_FC=:
-      hardcode_shlibpath_var_FC=no
-      ;;
-
-    *nto* | *qnx*)
-      ;;
-
-    openbsd*)
-      if test -f /usr/libexec/ld.so; then
-	hardcode_direct_FC=yes
-	hardcode_shlibpath_var_FC=no
-	hardcode_direct_absolute_FC=yes
-	if test -z "`echo __ELF__ | $CC -E - | $GREP __ELF__`" || test "$host_os-$host_cpu" = "openbsd2.8-powerpc"; then
-	  archive_cmds_FC='$CC -shared $pic_flag -o $lib $libobjs $deplibs $compiler_flags'
-	  archive_expsym_cmds_FC='$CC -shared $pic_flag -o $lib $libobjs $deplibs $compiler_flags ${wl}-retain-symbols-file,$export_symbols'
-	  hardcode_libdir_flag_spec_FC='${wl}-rpath,$libdir'
-	  export_dynamic_flag_spec_FC='${wl}-E'
-	else
-	  case $host_os in
-	   openbsd[01].* | openbsd2.[0-7] | openbsd2.[0-7].*)
-	     archive_cmds_FC='$LD -Bshareable -o $lib $libobjs $deplibs $linker_flags'
-	     hardcode_libdir_flag_spec_FC='-R$libdir'
-	     ;;
-	   *)
-	     archive_cmds_FC='$CC -shared $pic_flag -o $lib $libobjs $deplibs $compiler_flags'
-	     hardcode_libdir_flag_spec_FC='${wl}-rpath,$libdir'
-	     ;;
-	  esac
-	fi
-      else
-	ld_shlibs_FC=no
-      fi
-      ;;
-
-    os2*)
-      hardcode_libdir_flag_spec_FC='-L$libdir'
-      hardcode_minus_L_FC=yes
-      allow_undefined_flag_FC=unsupported
-      archive_cmds_FC='$ECHO "LIBRARY $libname INITINSTANCE" > $output_objdir/$libname.def~$ECHO "DESCRIPTION \"$libname\"" >> $output_objdir/$libname.def~echo DATA >> $output_objdir/$libname.def~echo " SINGLE NONSHARED" >> $output_objdir/$libname.def~echo EXPORTS >> $output_objdir/$libname.def~emxexp $libobjs >> $output_objdir/$libname.def~$CC -Zdll -Zcrtdll -o $lib $libobjs $deplibs $compiler_flags $output_objdir/$libname.def'
-      old_archive_from_new_cmds_FC='emximp -o $output_objdir/$libname.a $output_objdir/$libname.def'
-      ;;
-
-    osf3*)
-      if test "$GCC" = yes; then
-	allow_undefined_flag_FC=' ${wl}-expect_unresolved ${wl}\*'
-	archive_cmds_FC='$CC -shared${allow_undefined_flag} $libobjs $deplibs $compiler_flags ${wl}-soname ${wl}$soname `test -n "$verstring" && func_echo_all "${wl}-set_version ${wl}$verstring"` ${wl}-update_registry ${wl}${output_objdir}/so_locations -o $lib'
-      else
-	allow_undefined_flag_FC=' -expect_unresolved \*'
-	archive_cmds_FC='$CC -shared${allow_undefined_flag} $libobjs $deplibs $compiler_flags -soname $soname `test -n "$verstring" && func_echo_all "-set_version $verstring"` -update_registry ${output_objdir}/so_locations -o $lib'
-      fi
-      archive_cmds_need_lc_FC='no'
-      hardcode_libdir_flag_spec_FC='${wl}-rpath ${wl}$libdir'
-      hardcode_libdir_separator_FC=:
-      ;;
-
-    osf4* | osf5*)	# as osf3* with the addition of -msym flag
-      if test "$GCC" = yes; then
-	allow_undefined_flag_FC=' ${wl}-expect_unresolved ${wl}\*'
-	archive_cmds_FC='$CC -shared${allow_undefined_flag} $pic_flag $libobjs $deplibs $compiler_flags ${wl}-msym ${wl}-soname ${wl}$soname `test -n "$verstring" && func_echo_all "${wl}-set_version ${wl}$verstring"` ${wl}-update_registry ${wl}${output_objdir}/so_locations -o $lib'
-	hardcode_libdir_flag_spec_FC='${wl}-rpath ${wl}$libdir'
-      else
-	allow_undefined_flag_FC=' -expect_unresolved \*'
-	archive_cmds_FC='$CC -shared${allow_undefined_flag} $libobjs $deplibs $compiler_flags -msym -soname $soname `test -n "$verstring" && func_echo_all "-set_version $verstring"` -update_registry ${output_objdir}/so_locations -o $lib'
-	archive_expsym_cmds_FC='for i in `cat $export_symbols`; do printf "%s %s\\n" -exported_symbol "\$i" >> $lib.exp; done; printf "%s\\n" "-hidden">> $lib.exp~
-	$CC -shared${allow_undefined_flag} ${wl}-input ${wl}$lib.exp $compiler_flags $libobjs $deplibs -soname $soname `test -n "$verstring" && $ECHO "-set_version $verstring"` -update_registry ${output_objdir}/so_locations -o $lib~$RM $lib.exp'
-
-	# Both the C and C++ compilers support -rpath directly
-	hardcode_libdir_flag_spec_FC='-rpath $libdir'
-      fi
-      archive_cmds_need_lc_FC='no'
-      hardcode_libdir_separator_FC=:
-      ;;
-
-    solaris*)
-      no_undefined_flag_FC=' -z defs'
-      if test "$GCC" = yes; then
-	wlarc='${wl}'
-	archive_cmds_FC='$CC -shared $pic_flag ${wl}-z ${wl}text ${wl}-h ${wl}$soname -o $lib $libobjs $deplibs $compiler_flags'
-	archive_expsym_cmds_FC='echo "{ global:" > $lib.exp~cat $export_symbols | $SED -e "s/\(.*\)/\1;/" >> $lib.exp~echo "local: *; };" >> $lib.exp~
-	  $CC -shared $pic_flag ${wl}-z ${wl}text ${wl}-M ${wl}$lib.exp ${wl}-h ${wl}$soname -o $lib $libobjs $deplibs $compiler_flags~$RM $lib.exp'
-      else
-	case `$CC -V 2>&1` in
-	*"Compilers 5.0"*)
-	  wlarc=''
-	  archive_cmds_FC='$LD -G${allow_undefined_flag} -h $soname -o $lib $libobjs $deplibs $linker_flags'
-	  archive_expsym_cmds_FC='echo "{ global:" > $lib.exp~cat $export_symbols | $SED -e "s/\(.*\)/\1;/" >> $lib.exp~echo "local: *; };" >> $lib.exp~
-	  $LD -G${allow_undefined_flag} -M $lib.exp -h $soname -o $lib $libobjs $deplibs $linker_flags~$RM $lib.exp'
-	  ;;
-	*)
-	  wlarc='${wl}'
-	  archive_cmds_FC='$CC -G${allow_undefined_flag} -h $soname -o $lib $libobjs $deplibs $compiler_flags'
-	  archive_expsym_cmds_FC='echo "{ global:" > $lib.exp~cat $export_symbols | $SED -e "s/\(.*\)/\1;/" >> $lib.exp~echo "local: *; };" >> $lib.exp~
-	  $CC -G${allow_undefined_flag} -M $lib.exp -h $soname -o $lib $libobjs $deplibs $compiler_flags~$RM $lib.exp'
-	  ;;
-	esac
-      fi
-      hardcode_libdir_flag_spec_FC='-R$libdir'
-      hardcode_shlibpath_var_FC=no
-      case $host_os in
-      solaris2.[0-5] | solaris2.[0-5].*) ;;
-      *)
-	# The compiler driver will combine and reorder linker options,
-	# but understands `-z linker_flag'.  GCC discards it without `$wl',
-	# but is careful enough not to reorder.
-	# Supported since Solaris 2.6 (maybe 2.5.1?)
-	if test "$GCC" = yes; then
-	  whole_archive_flag_spec_FC='${wl}-z ${wl}allextract$convenience ${wl}-z ${wl}defaultextract'
-	else
-	  whole_archive_flag_spec_FC='-z allextract$convenience -z defaultextract'
-	fi
-	;;
-      esac
-      link_all_deplibs_FC=yes
-      ;;
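
The Solaris export commands above synthesize a linker map file from $export_symbols on every link. For a two-symbol export list, the generated file and link step look roughly like this (a sketch; libfoo and the object files are hypothetical):

    # Turn "foo bar" into a Solaris linker map: { global: foo; bar; local: *; };
    printf '%s\n' foo bar > exported.sym
    {
      echo '{ global:'
      sed -e 's/\(.*\)/\1;/' exported.sym
      echo 'local: *; };'
    } > libfoo.exp
    # -M hands the map to the Solaris linker; -h sets the soname.
    cc -G -M libfoo.exp -h libfoo.so.1 -o libfoo.so.1 foo.o bar.o
    rm -f libfoo.exp
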
-
-    sunos4*)
-      if test "x$host_vendor" = xsequent; then
-	# Use $CC to link under sequent, because it throws in some extra .o
-	# files that make .init and .fini sections work.
-	archive_cmds_FC='$CC -G ${wl}-h $soname -o $lib $libobjs $deplibs $compiler_flags'
-      else
-	archive_cmds_FC='$LD -assert pure-text -Bstatic -o $lib $libobjs $deplibs $linker_flags'
-      fi
-      hardcode_libdir_flag_spec_FC='-L$libdir'
-      hardcode_direct_FC=yes
-      hardcode_minus_L_FC=yes
-      hardcode_shlibpath_var_FC=no
-      ;;
-
-    sysv4)
-      case $host_vendor in
-	sni)
-	  archive_cmds_FC='$LD -G -h $soname -o $lib $libobjs $deplibs $linker_flags'
-	  hardcode_direct_FC=yes # is this really true???
-	;;
-	siemens)
-	  ## LD is ld; it makes a PLAMLIB.
-	  ## CC just makes a GrossModule.
-	  archive_cmds_FC='$LD -G -o $lib $libobjs $deplibs $linker_flags'
-	  reload_cmds_FC='$CC -r -o $output$reload_objs'
-	  hardcode_direct_FC=no
-        ;;
-	motorola)
-	  archive_cmds_FC='$LD -G -h $soname -o $lib $libobjs $deplibs $linker_flags'
-	  hardcode_direct_FC=no # Motorola manual says yes, but my tests say they lie
-	;;
-      esac
-      runpath_var='LD_RUN_PATH'
-      hardcode_shlibpath_var_FC=no
-      ;;
-
-    sysv4.3*)
-      archive_cmds_FC='$LD -G -h $soname -o $lib $libobjs $deplibs $linker_flags'
-      hardcode_shlibpath_var_FC=no
-      export_dynamic_flag_spec_FC='-Bexport'
-      ;;
-
-    sysv4*MP*)
-      if test -d /usr/nec; then
-	archive_cmds_FC='$LD -G -h $soname -o $lib $libobjs $deplibs $linker_flags'
-	hardcode_shlibpath_var_FC=no
-	runpath_var=LD_RUN_PATH
-	hardcode_runpath_var=yes
-	ld_shlibs_FC=yes
-      fi
-      ;;
-
-    sysv4*uw2* | sysv5OpenUNIX* | sysv5UnixWare7.[01].[10]* | unixware7* | sco3.2v5.0.[024]*)
-      no_undefined_flag_FC='${wl}-z,text'
-      archive_cmds_need_lc_FC=no
-      hardcode_shlibpath_var_FC=no
-      runpath_var='LD_RUN_PATH'
-
-      if test "$GCC" = yes; then
-	archive_cmds_FC='$CC -shared ${wl}-h,$soname -o $lib $libobjs $deplibs $compiler_flags'
-	archive_expsym_cmds_FC='$CC -shared ${wl}-Bexport:$export_symbols ${wl}-h,$soname -o $lib $libobjs $deplibs $compiler_flags'
-      else
-	archive_cmds_FC='$CC -G ${wl}-h,$soname -o $lib $libobjs $deplibs $compiler_flags'
-	archive_expsym_cmds_FC='$CC -G ${wl}-Bexport:$export_symbols ${wl}-h,$soname -o $lib $libobjs $deplibs $compiler_flags'
-      fi
-      ;;
-
-    sysv5* | sco3.2v5* | sco5v6*)
-      # Note: We can NOT use -z defs as we might desire, because we do not
-      # link with -lc, and that would cause any symbols used from libc to
-      # always be unresolved, which means just about no library would
-      # ever link correctly.  If we're not using GNU ld we use -z text
-      # though, which does catch some bad symbols but isn't as heavy-handed
-      # as -z defs.
-      no_undefined_flag_FC='${wl}-z,text'
-      allow_undefined_flag_FC='${wl}-z,nodefs'
-      archive_cmds_need_lc_FC=no
-      hardcode_shlibpath_var_FC=no
-      hardcode_libdir_flag_spec_FC='${wl}-R,$libdir'
-      hardcode_libdir_separator_FC=':'
-      link_all_deplibs_FC=yes
-      export_dynamic_flag_spec_FC='${wl}-Bexport'
-      runpath_var='LD_RUN_PATH'
-
-      if test "$GCC" = yes; then
-	archive_cmds_FC='$CC -shared ${wl}-h,$soname -o $lib $libobjs $deplibs $compiler_flags'
-	archive_expsym_cmds_FC='$CC -shared ${wl}-Bexport:$export_symbols ${wl}-h,$soname -o $lib $libobjs $deplibs $compiler_flags'
-      else
-	archive_cmds_FC='$CC -G ${wl}-h,$soname -o $lib $libobjs $deplibs $compiler_flags'
-	archive_expsym_cmds_FC='$CC -G ${wl}-Bexport:$export_symbols ${wl}-h,$soname -o $lib $libobjs $deplibs $compiler_flags'
-      fi
-      ;;
-
-    uts4*)
-      archive_cmds_FC='$LD -G -h $soname -o $lib $libobjs $deplibs $linker_flags'
-      hardcode_libdir_flag_spec_FC='-L$libdir'
-      hardcode_shlibpath_var_FC=no
-      ;;
-
-    *)
-      ld_shlibs_FC=no
-      ;;
-    esac
-
-    if test x$host_vendor = xsni; then
-      case $host in
-      sysv4 | sysv4.2uw2* | sysv4.3* | sysv5*)
-	export_dynamic_flag_spec_FC='${wl}-Blargedynsym'
-	;;
-      esac
-    fi
-  fi
-
-{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $ld_shlibs_FC" >&5
-$as_echo "$ld_shlibs_FC" >&6; }
-test "$ld_shlibs_FC" = no && can_build_shared=no
-
-with_gnu_ld_FC=$with_gnu_ld
-
-
-
-
-
-
-#
-# Do we need to explicitly link libc?
-#
-case "x$archive_cmds_need_lc_FC" in
-x|xyes)
-  # Assume -lc should be added
-  archive_cmds_need_lc_FC=yes
-
-  if test "$enable_shared" = yes && test "$GCC" = yes; then
-    case $archive_cmds_FC in
-    *'~'*)
-      # FIXME: we may have to deal with multi-command sequences.
-      ;;
-    '$CC '*)
-      # Test whether the compiler implicitly links with -lc since on some
-      # systems, -lgcc has to come before -lc. If gcc already passes -lc
-      # to ld, don't add -lc before -lgcc.
-      { $as_echo "$as_me:${as_lineno-$LINENO}: checking whether -lc should be explicitly linked in" >&5
-$as_echo_n "checking whether -lc should be explicitly linked in... " >&6; }
-if ${lt_cv_archive_cmds_need_lc_FC+:} false; then :
-  $as_echo_n "(cached) " >&6
-else
-  $RM conftest*
-	echo "$lt_simple_compile_test_code" > conftest.$ac_ext
-
-	if { { eval echo "\"\$as_me\":${as_lineno-$LINENO}: \"$ac_compile\""; } >&5
-  (eval $ac_compile) 2>&5
-  ac_status=$?
-  $as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5
-  test $ac_status = 0; } 2>conftest.err; then
-	  soname=conftest
-	  lib=conftest
-	  libobjs=conftest.$ac_objext
-	  deplibs=
-	  wl=$lt_prog_compiler_wl_FC
-	  pic_flag=$lt_prog_compiler_pic_FC
-	  compiler_flags=-v
-	  linker_flags=-v
-	  verstring=
-	  output_objdir=.
-	  libname=conftest
-	  lt_save_allow_undefined_flag=$allow_undefined_flag_FC
-	  allow_undefined_flag_FC=
-	  if { { eval echo "\"\$as_me\":${as_lineno-$LINENO}: \"$archive_cmds_FC 2\>\&1 \| $GREP \" -lc \" \>/dev/null 2\>\&1\""; } >&5
-  (eval $archive_cmds_FC 2\>\&1 \| $GREP \" -lc \" \>/dev/null 2\>\&1) 2>&5
-  ac_status=$?
-  $as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5
-  test $ac_status = 0; }
-	  then
-	    lt_cv_archive_cmds_need_lc_FC=no
-	  else
-	    lt_cv_archive_cmds_need_lc_FC=yes
-	  fi
-	  allow_undefined_flag_FC=$lt_save_allow_undefined_flag
-	else
-	  cat conftest.err 1>&5
-	fi
-	$RM conftest*
-
-fi
-{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $lt_cv_archive_cmds_need_lc_FC" >&5
-$as_echo "$lt_cv_archive_cmds_need_lc_FC" >&6; }
-      archive_cmds_need_lc_FC=$lt_cv_archive_cmds_need_lc_FC
-      ;;
-    esac
-  fi
-  ;;
-esac
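
The cache check above re-runs the configured archive command with verbose compiler flags and greps the driver's link line for an explicit ` -lc `; if the driver already passes it, libtool must not add another one ahead of -lgcc. Reduced to its essence (a sketch using a C object for brevity; the real test links the FC-tag test program):

    # Does the compiler driver already pass -lc to ld when building a
    # shared object?
    echo 'int conftest_dummy;' > conftest.c
    cc -c conftest.c
    if cc -shared conftest.o -v -Wl,-soname,conftest.so -o conftest.so 2>&1 \
         | grep ' -lc ' >/dev/null; then
      archive_cmds_need_lc=no    # driver supplies -lc itself
    else
      archive_cmds_need_lc=yes   # libtool must append -lc explicitly
    fi
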
-
-    { $as_echo "$as_me:${as_lineno-$LINENO}: checking dynamic linker characteristics" >&5
-$as_echo_n "checking dynamic linker characteristics... " >&6; }
-
-library_names_spec=
-libname_spec='lib$name'
-soname_spec=
-shrext_cmds=".so"
-postinstall_cmds=
-postuninstall_cmds=
-finish_cmds=
-finish_eval=
-shlibpath_var=
-shlibpath_overrides_runpath=unknown
-version_type=none
-dynamic_linker="$host_os ld.so"
-sys_lib_dlsearch_path_spec="/lib /usr/lib"
-need_lib_prefix=unknown
-hardcode_into_libs=no
-
-# when you set need_version to no, make sure it does not cause -set_version
-# flags to be left without arguments
-need_version=unknown
-
-case $host_os in
-aix3*)
-  version_type=linux # correct to gnu/linux during the next big refactor
-  library_names_spec='${libname}${release}${shared_ext}$versuffix $libname.a'
-  shlibpath_var=LIBPATH
-
-  # AIX 3 has no versioning support, so we append a major version to the name.
-  soname_spec='${libname}${release}${shared_ext}$major'
-  ;;
-
-aix[4-9]*)
-  version_type=linux # correct to gnu/linux during the next big refactor
-  need_lib_prefix=no
-  need_version=no
-  hardcode_into_libs=yes
-  if test "$host_cpu" = ia64; then
-    # AIX 5 supports IA64
-    library_names_spec='${libname}${release}${shared_ext}$major ${libname}${release}${shared_ext}$versuffix $libname${shared_ext}'
-    shlibpath_var=LD_LIBRARY_PATH
-  else
-    # With GCC up to 2.95.x, collect2 would create an import file
-    # for dependent libraries.  The import file would start with
-    # the line `#! .'.  This would cause the generated library to
-    # depend on `.', always an invalid library.  This was fixed in
-    # development snapshots of GCC prior to 3.0.
-    case $host_os in
-      aix4 | aix4.[01] | aix4.[01].*)
-      if { echo '#if __GNUC__ > 2 || (__GNUC__ == 2 && __GNUC_MINOR__ >= 97)'
-	   echo ' yes '
-	   echo '#endif'; } | ${CC} -E - | $GREP yes > /dev/null; then
-	:
-      else
-	can_build_shared=no
-      fi
-      ;;
-    esac
-    # AIX (on Power*) has no versioning support, so currently we cannot hardcode
-    # the correct soname into the executable. We could probably add versioning
-    # support to collect2, so additional links may be useful in the future.
-    if test "$aix_use_runtimelinking" = yes; then
-      # If using run time linking (on AIX 4.2 or later) use lib<name>.so
-      # instead of lib<name>.a to let people know that these are not
-      # typical AIX shared libraries.
-      library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext}$major $libname${shared_ext}'
-    else
-      # We preserve .a as extension for shared libraries through AIX4.2
-      # and later when we are not doing run time linking.
-      library_names_spec='${libname}${release}.a $libname.a'
-      soname_spec='${libname}${release}${shared_ext}$major'
-    fi
-    shlibpath_var=LIBPATH
-  fi
-  ;;
-
-amigaos*)
-  case $host_cpu in
-  powerpc)
-    # Since July 2007 AmigaOS4 officially supports .so libraries.
-    # When compiling the executable, add -use-dynld -Lsobjs: to the compile line.
-    library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext}$major $libname${shared_ext}'
-    ;;
-  m68k)
-    library_names_spec='$libname.ixlibrary $libname.a'
-    # Create ${libname}_ixlibrary.a entries in /sys/libs.
-    finish_eval='for lib in `ls $libdir/*.ixlibrary 2>/dev/null`; do libname=`func_echo_all "$lib" | $SED '\''s%^.*/\([^/]*\)\.ixlibrary$%\1%'\''`; test $RM /sys/libs/${libname}_ixlibrary.a; $show "cd /sys/libs && $LN_S $lib ${libname}_ixlibrary.a"; cd /sys/libs && $LN_S $lib ${libname}_ixlibrary.a || exit 1; done'
-    ;;
-  esac
-  ;;
-
-beos*)
-  library_names_spec='${libname}${shared_ext}'
-  dynamic_linker="$host_os ld.so"
-  shlibpath_var=LIBRARY_PATH
-  ;;
-
-bsdi[45]*)
-  version_type=linux # correct to gnu/linux during the next big refactor
-  need_version=no
-  library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext}$major $libname${shared_ext}'
-  soname_spec='${libname}${release}${shared_ext}$major'
-  finish_cmds='PATH="\$PATH:/sbin" ldconfig $libdir'
-  shlibpath_var=LD_LIBRARY_PATH
-  sys_lib_search_path_spec="/shlib /usr/lib /usr/X11/lib /usr/contrib/lib /lib /usr/local/lib"
-  sys_lib_dlsearch_path_spec="/shlib /usr/lib /usr/local/lib"
-  # the default ld.so.conf also contains /usr/contrib/lib and
-  # /usr/X11R6/lib (/usr/X11 is a link to /usr/X11R6), but let us allow
-  # libtool to hard-code these into programs
-  ;;
-
-cygwin* | mingw* | pw32* | cegcc*)
-  version_type=windows
-  shrext_cmds=".dll"
-  need_version=no
-  need_lib_prefix=no
-
-  case $GCC,$cc_basename in
-  yes,*)
-    # gcc
-    library_names_spec='$libname.dll.a'
-    # DLL is installed to $(libdir)/../bin by postinstall_cmds
-    postinstall_cmds='base_file=`basename \${file}`~
-      dlpath=`$SHELL 2>&1 -c '\''. $dir/'\''\${base_file}'\''i; echo \$dlname'\''`~
-      dldir=$destdir/`dirname \$dlpath`~
-      test -d \$dldir || mkdir -p \$dldir~
-      $install_prog $dir/$dlname \$dldir/$dlname~
-      chmod a+x \$dldir/$dlname~
-      if test -n '\''$stripme'\'' && test -n '\''$striplib'\''; then
-        eval '\''$striplib \$dldir/$dlname'\'' || exit \$?;
-      fi'
-    postuninstall_cmds='dldll=`$SHELL 2>&1 -c '\''. $file; echo \$dlname'\''`~
-      dlpath=$dir/\$dldll~
-       $RM \$dlpath'
-    shlibpath_overrides_runpath=yes
-
-    case $host_os in
-    cygwin*)
-      # Cygwin DLLs use 'cyg' prefix rather than 'lib'
-      soname_spec='`echo ${libname} | sed -e 's/^lib/cyg/'``echo ${release} | $SED -e 's/[.]/-/g'`${versuffix}${shared_ext}'
-
-      ;;
-    mingw* | cegcc*)
-      # MinGW DLLs use traditional 'lib' prefix
-      soname_spec='${libname}`echo ${release} | $SED -e 's/[.]/-/g'`${versuffix}${shared_ext}'
-      ;;
-    pw32*)
-      # pw32 DLLs use 'pw' prefix rather than 'lib'
-      library_names_spec='`echo ${libname} | sed -e 's/^lib/pw/'``echo ${release} | $SED -e 's/[.]/-/g'`${versuffix}${shared_ext}'
-      ;;
-    esac
-    dynamic_linker='Win32 ld.exe'
-    ;;
-
-  *,cl*)
-    # Native MSVC
-    libname_spec='$name'
-    soname_spec='${libname}`echo ${release} | $SED -e 's/[.]/-/g'`${versuffix}${shared_ext}'
-    library_names_spec='${libname}.dll.lib'
-
-    case $build_os in
-    mingw*)
-      sys_lib_search_path_spec=
-      lt_save_ifs=$IFS
-      IFS=';'
-      for lt_path in $LIB
-      do
-        IFS=$lt_save_ifs
-        # Let DOS variable expansion print the short 8.3 style file name.
-        lt_path=`cd "$lt_path" 2>/dev/null && cmd //C "for %i in (".") do @echo %~si"`
-        sys_lib_search_path_spec="$sys_lib_search_path_spec $lt_path"
-      done
-      IFS=$lt_save_ifs
-      # Convert to MSYS style.
-      sys_lib_search_path_spec=`$ECHO "$sys_lib_search_path_spec" | sed -e 's|\\\\|/|g' -e 's| \\([a-zA-Z]\\):| /\\1|g' -e 's|^ ||'`
-      ;;
-    cygwin*)
-      # Convert to unix form, then to dos form, then back to unix form
-      # but this time dos style (no spaces!) so that the unix form looks
-      # like /cygdrive/c/PROGRA~1:/cygdr...
-      sys_lib_search_path_spec=`cygpath --path --unix "$LIB"`
-      sys_lib_search_path_spec=`cygpath --path --dos "$sys_lib_search_path_spec" 2>/dev/null`
-      sys_lib_search_path_spec=`cygpath --path --unix "$sys_lib_search_path_spec" | $SED -e "s/$PATH_SEPARATOR/ /g"`
-      ;;
-    *)
-      sys_lib_search_path_spec="$LIB"
-      if $ECHO "$sys_lib_search_path_spec" | $GREP ';[c-zC-Z]:/' >/dev/null; then
-        # It is most probably a Windows format PATH.
-        sys_lib_search_path_spec=`$ECHO "$sys_lib_search_path_spec" | $SED -e 's/;/ /g'`
-      else
-        sys_lib_search_path_spec=`$ECHO "$sys_lib_search_path_spec" | $SED -e "s/$PATH_SEPARATOR/ /g"`
-      fi
-      # FIXME: find the short name or the path components, as spaces are
-      # common. (e.g. "Program Files" -> "PROGRA~1")
-      ;;
-    esac
-
-    # DLL is installed to $(libdir)/../bin by postinstall_cmds
-    postinstall_cmds='base_file=`basename \${file}`~
-      dlpath=`$SHELL 2>&1 -c '\''. $dir/'\''\${base_file}'\''i; echo \$dlname'\''`~
-      dldir=$destdir/`dirname \$dlpath`~
-      test -d \$dldir || mkdir -p \$dldir~
-      $install_prog $dir/$dlname \$dldir/$dlname'
-    postuninstall_cmds='dldll=`$SHELL 2>&1 -c '\''. $file; echo \$dlname'\''`~
-      dlpath=$dir/\$dldll~
-       $RM \$dlpath'
-    shlibpath_overrides_runpath=yes
-    dynamic_linker='Win32 link.exe'
-    ;;
-
-  *)
-    # Assume MSVC wrapper
-    library_names_spec='${libname}`echo ${release} | $SED -e 's/[.]/-/g'`${versuffix}${shared_ext} $libname.lib'
-    dynamic_linker='Win32 ld.exe'
-    ;;
-  esac
-  # FIXME: first we should search . and the directory the executable is in
-  shlibpath_var=PATH
-  ;;
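
The MSVC branch above launders $LIB through cygpath twice: once to unix form, once to DOS 8.3 form to squeeze out embedded spaces, and back to unix form for the final space-separated search path. A sketch (assuming a Cygwin shell; the LIB value is illustrative):

    LIB='C:\Program Files\Microsoft SDK\Lib;C:\VC\lib'
    p=`cygpath --path --unix "$LIB"`            # -> colon-separated unix paths
    p=`cygpath --path --dos "$p" 2>/dev/null`   # -> 8.3 short names, no spaces
    p=`cygpath --path --unix "$p" | sed -e 's/:/ /g'`
    echo "$p"   # e.g. /cygdrive/c/PROGRA~1/MICROS~1/Lib /cygdrive/c/VC/lib
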
-
-darwin* | rhapsody*)
-  dynamic_linker="$host_os dyld"
-  version_type=darwin
-  need_lib_prefix=no
-  need_version=no
-  library_names_spec='${libname}${release}${major}$shared_ext ${libname}$shared_ext'
-  soname_spec='${libname}${release}${major}$shared_ext'
-  shlibpath_overrides_runpath=yes
-  shlibpath_var=DYLD_LIBRARY_PATH
-  shrext_cmds='`test .$module = .yes && echo .so || echo .dylib`'
-
-  sys_lib_dlsearch_path_spec='/usr/local/lib /lib /usr/lib'
-  ;;
-
-dgux*)
-  version_type=linux # correct to gnu/linux during the next big refactor
-  need_lib_prefix=no
-  need_version=no
-  library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext}$major $libname$shared_ext'
-  soname_spec='${libname}${release}${shared_ext}$major'
-  shlibpath_var=LD_LIBRARY_PATH
-  ;;
-
-freebsd* | dragonfly*)
-  # DragonFly does not have aout.  When/if they implement a new
-  # versioning mechanism, adjust this.
-  if test -x /usr/bin/objformat; then
-    objformat=`/usr/bin/objformat`
-  else
-    case $host_os in
-    freebsd[23].*) objformat=aout ;;
-    *) objformat=elf ;;
-    esac
-  fi
-  version_type=freebsd-$objformat
-  case $version_type in
-    freebsd-elf*)
-      library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext} $libname${shared_ext}'
-      need_version=no
-      need_lib_prefix=no
-      ;;
-    freebsd-*)
-      library_names_spec='${libname}${release}${shared_ext}$versuffix $libname${shared_ext}$versuffix'
-      need_version=yes
-      ;;
-  esac
-  shlibpath_var=LD_LIBRARY_PATH
-  case $host_os in
-  freebsd2.*)
-    shlibpath_overrides_runpath=yes
-    ;;
-  freebsd3.[01]* | freebsdelf3.[01]*)
-    shlibpath_overrides_runpath=yes
-    hardcode_into_libs=yes
-    ;;
-  freebsd3.[2-9]* | freebsdelf3.[2-9]* | \
-  freebsd4.[0-5] | freebsdelf4.[0-5] | freebsd4.1.1 | freebsdelf4.1.1)
-    shlibpath_overrides_runpath=no
-    hardcode_into_libs=yes
-    ;;
-  *) # from 4.6 on, and DragonFly
-    shlibpath_overrides_runpath=yes
-    hardcode_into_libs=yes
-    ;;
-  esac
-  ;;
-
-gnu*)
-  version_type=linux # correct to gnu/linux during the next big refactor
-  need_lib_prefix=no
-  need_version=no
-  library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext}${major} ${libname}${shared_ext}'
-  soname_spec='${libname}${release}${shared_ext}$major'
-  shlibpath_var=LD_LIBRARY_PATH
-  shlibpath_overrides_runpath=no
-  hardcode_into_libs=yes
-  ;;
-
-haiku*)
-  version_type=linux # correct to gnu/linux during the next big refactor
-  need_lib_prefix=no
-  need_version=no
-  dynamic_linker="$host_os runtime_loader"
-  library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext}${major} ${libname}${shared_ext}'
-  soname_spec='${libname}${release}${shared_ext}$major'
-  shlibpath_var=LIBRARY_PATH
-  shlibpath_overrides_runpath=yes
-  sys_lib_dlsearch_path_spec='/boot/home/config/lib /boot/common/lib /boot/system/lib'
-  hardcode_into_libs=yes
-  ;;
-
-hpux9* | hpux10* | hpux11*)
-  # Give a soname corresponding to the major version so that dld.sl refuses to
-  # link against other versions.
-  version_type=sunos
-  need_lib_prefix=no
-  need_version=no
-  case $host_cpu in
-  ia64*)
-    shrext_cmds='.so'
-    hardcode_into_libs=yes
-    dynamic_linker="$host_os dld.so"
-    shlibpath_var=LD_LIBRARY_PATH
-    shlibpath_overrides_runpath=yes # Unless +noenvvar is specified.
-    library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext}$major $libname${shared_ext}'
-    soname_spec='${libname}${release}${shared_ext}$major'
-    if test "X$HPUX_IA64_MODE" = X32; then
-      sys_lib_search_path_spec="/usr/lib/hpux32 /usr/local/lib/hpux32 /usr/local/lib"
-    else
-      sys_lib_search_path_spec="/usr/lib/hpux64 /usr/local/lib/hpux64"
-    fi
-    sys_lib_dlsearch_path_spec=$sys_lib_search_path_spec
-    ;;
-  hppa*64*)
-    shrext_cmds='.sl'
-    hardcode_into_libs=yes
-    dynamic_linker="$host_os dld.sl"
-    shlibpath_var=LD_LIBRARY_PATH # How should we handle SHLIB_PATH
-    shlibpath_overrides_runpath=yes # Unless +noenvvar is specified.
-    library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext}$major $libname${shared_ext}'
-    soname_spec='${libname}${release}${shared_ext}$major'
-    sys_lib_search_path_spec="/usr/lib/pa20_64 /usr/ccs/lib/pa20_64"
-    sys_lib_dlsearch_path_spec=$sys_lib_search_path_spec
-    ;;
-  *)
-    shrext_cmds='.sl'
-    dynamic_linker="$host_os dld.sl"
-    shlibpath_var=SHLIB_PATH
-    shlibpath_overrides_runpath=no # +s is required to enable SHLIB_PATH
-    library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext}$major $libname${shared_ext}'
-    soname_spec='${libname}${release}${shared_ext}$major'
-    ;;
-  esac
-  # HP-UX runs *really* slowly unless shared libraries are mode 555, ...
-  postinstall_cmds='chmod 555 $lib'
-  # or fails outright, so override atomically:
-  install_override_mode=555
-  ;;
-
-interix[3-9]*)
-  version_type=linux # correct to gnu/linux during the next big refactor
-  need_lib_prefix=no
-  need_version=no
-  library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext}$major ${libname}${shared_ext}'
-  soname_spec='${libname}${release}${shared_ext}$major'
-  dynamic_linker='Interix 3.x ld.so.1 (PE, like ELF)'
-  shlibpath_var=LD_LIBRARY_PATH
-  shlibpath_overrides_runpath=no
-  hardcode_into_libs=yes
-  ;;
-
-irix5* | irix6* | nonstopux*)
-  case $host_os in
-    nonstopux*) version_type=nonstopux ;;
-    *)
-	if test "$lt_cv_prog_gnu_ld" = yes; then
-		version_type=linux # correct to gnu/linux during the next big refactor
-	else
-		version_type=irix
-	fi ;;
-  esac
-  need_lib_prefix=no
-  need_version=no
-  soname_spec='${libname}${release}${shared_ext}$major'
-  library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext}$major ${libname}${release}${shared_ext} $libname${shared_ext}'
-  case $host_os in
-  irix5* | nonstopux*)
-    libsuff= shlibsuff=
-    ;;
-  *)
-    case $LD in # libtool.m4 will add one of these switches to LD
-    *-32|*"-32 "|*-melf32bsmip|*"-melf32bsmip ")
-      libsuff= shlibsuff= libmagic=32-bit;;
-    *-n32|*"-n32 "|*-melf32bmipn32|*"-melf32bmipn32 ")
-      libsuff=32 shlibsuff=N32 libmagic=N32;;
-    *-64|*"-64 "|*-melf64bmip|*"-melf64bmip ")
-      libsuff=64 shlibsuff=64 libmagic=64-bit;;
-    *) libsuff= shlibsuff= libmagic=never-match;;
-    esac
-    ;;
-  esac
-  shlibpath_var=LD_LIBRARY${shlibsuff}_PATH
-  shlibpath_overrides_runpath=no
-  sys_lib_search_path_spec="/usr/lib${libsuff} /lib${libsuff} /usr/local/lib${libsuff}"
-  sys_lib_dlsearch_path_spec="/usr/lib${libsuff} /lib${libsuff}"
-  hardcode_into_libs=yes
-  ;;
-
-# No shared lib support for Linux oldld, aout, or coff.
-linux*oldld* | linux*aout* | linux*coff*)
-  dynamic_linker=no
-  ;;
-
-# This must be glibc/ELF.
-linux* | k*bsd*-gnu | kopensolaris*-gnu)
-  version_type=linux # correct to gnu/linux during the next big refactor
-  need_lib_prefix=no
-  need_version=no
-  library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext}$major $libname${shared_ext}'
-  soname_spec='${libname}${release}${shared_ext}$major'
-  finish_cmds='PATH="\$PATH:/sbin" ldconfig -n $libdir'
-  shlibpath_var=LD_LIBRARY_PATH
-  shlibpath_overrides_runpath=no
-
-  # Some binutils ld are patched to set DT_RUNPATH
-  if ${lt_cv_shlibpath_overrides_runpath+:} false; then :
-  $as_echo_n "(cached) " >&6
-else
-  lt_cv_shlibpath_overrides_runpath=no
-    save_LDFLAGS=$LDFLAGS
-    save_libdir=$libdir
-    eval "libdir=/foo; wl=\"$lt_prog_compiler_wl_FC\"; \
-	 LDFLAGS=\"\$LDFLAGS $hardcode_libdir_flag_spec_FC\""
-    cat > conftest.$ac_ext <<_ACEOF
-      program main
-
-      end
-_ACEOF
-if ac_fn_fc_try_link "$LINENO"; then :
-  if  ($OBJDUMP -p conftest$ac_exeext) 2>/dev/null | grep "RUNPATH.*$libdir" >/dev/null; then :
-  lt_cv_shlibpath_overrides_runpath=yes
-fi
-fi
-rm -f core conftest.err conftest.$ac_objext \
-    conftest$ac_exeext conftest.$ac_ext
-    LDFLAGS=$save_LDFLAGS
-    libdir=$save_libdir
-
-fi
-
-  shlibpath_overrides_runpath=$lt_cv_shlibpath_overrides_runpath
-
-  # This implies no fast_install, which is unacceptable.
-  # Some rework will be needed to allow for fast_install
-  # before this can be enabled.
-  hardcode_into_libs=yes
-
-  # Append ld.so.conf contents to the search path
-  if test -f /etc/ld.so.conf; then
-    lt_ld_extra=`awk '/^include / { system(sprintf("cd /etc; cat %s 2>/dev/null", \$2)); skip = 1; } { if (!skip) print \$0; skip = 0; }' < /etc/ld.so.conf | $SED -e 's/#.*//;/^[	 ]*hwcap[	 ]/d;s/[:,	]/ /g;s/=[^=]*$//;s/=[^= ]* / /g;s/"//g;/^$/d' | tr '\n' ' '`
-    sys_lib_dlsearch_path_spec="/lib /usr/lib $lt_ld_extra"
-  fi
-
-  # We used to test for /lib/ld.so.1 and disable shared libraries on
-  # powerpc, because MkLinux only supported shared libraries with the
-  # GNU dynamic linker.  Because that check broke cross compilation,
-  # most powerpc-linux boxes support dynamic linking these days, and
-  # people can always --disable-shared, the test was removed, and we
-  # assume the GNU/Linux dynamic linker is in use.
-  dynamic_linker='GNU/Linux ld.so'
-  ;;
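
For a concrete feel of what library_names_spec, soname_spec, and finish_cmds expand to on GNU/Linux, here is the conventional name triplet for a hypothetical libfoo at version 1.2.3 with major version 1 (a sketch, not commands taken from the configure script):

    # real file (versuffix), soname link (major), linker name (bare)
    gcc -shared -Wl,-soname,libfoo.so.1 foo.o -o libfoo.so.1.2.3
    ln -sf libfoo.so.1.2.3 libfoo.so.1   # what ld.so resolves at run time
    ln -sf libfoo.so.1     libfoo.so     # what -lfoo resolves at link time
    ldconfig -n .                        # finish_cmds: rebuild symlinks here
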
-
-netbsd*)
-  version_type=sunos
-  need_lib_prefix=no
-  need_version=no
-  if echo __ELF__ | $CC -E - | $GREP __ELF__ >/dev/null; then
-    library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${shared_ext}$versuffix'
-    finish_cmds='PATH="\$PATH:/sbin" ldconfig -m $libdir'
-    dynamic_linker='NetBSD (a.out) ld.so'
-  else
-    library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext}$major ${libname}${shared_ext}'
-    soname_spec='${libname}${release}${shared_ext}$major'
-    dynamic_linker='NetBSD ld.elf_so'
-  fi
-  shlibpath_var=LD_LIBRARY_PATH
-  shlibpath_overrides_runpath=yes
-  hardcode_into_libs=yes
-  ;;
-
-newsos6)
-  version_type=linux # correct to gnu/linux during the next big refactor
-  library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext}$major $libname${shared_ext}'
-  shlibpath_var=LD_LIBRARY_PATH
-  shlibpath_overrides_runpath=yes
-  ;;
-
-*nto* | *qnx*)
-  version_type=qnx
-  need_lib_prefix=no
-  need_version=no
-  library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext}$major $libname${shared_ext}'
-  soname_spec='${libname}${release}${shared_ext}$major'
-  shlibpath_var=LD_LIBRARY_PATH
-  shlibpath_overrides_runpath=no
-  hardcode_into_libs=yes
-  dynamic_linker='ldqnx.so'
-  ;;
-
-openbsd*)
-  version_type=sunos
-  sys_lib_dlsearch_path_spec="/usr/lib"
-  need_lib_prefix=no
-  # Some older versions of OpenBSD (3.3 at least) *do* need versioned libs.
-  case $host_os in
-    openbsd3.3 | openbsd3.3.*)	need_version=yes ;;
-    *)				need_version=no  ;;
-  esac
-  library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${shared_ext}$versuffix'
-  finish_cmds='PATH="\$PATH:/sbin" ldconfig -m $libdir'
-  shlibpath_var=LD_LIBRARY_PATH
-  if test -z "`echo __ELF__ | $CC -E - | $GREP __ELF__`" || test "$host_os-$host_cpu" = "openbsd2.8-powerpc"; then
-    case $host_os in
-      openbsd2.[89] | openbsd2.[89].*)
-	shlibpath_overrides_runpath=no
-	;;
-      *)
-	shlibpath_overrides_runpath=yes
-	;;
-      esac
-  else
-    shlibpath_overrides_runpath=yes
-  fi
-  ;;
-
-os2*)
-  libname_spec='$name'
-  shrext_cmds=".dll"
-  need_lib_prefix=no
-  library_names_spec='$libname${shared_ext} $libname.a'
-  dynamic_linker='OS/2 ld.exe'
-  shlibpath_var=LIBPATH
-  ;;
-
-osf3* | osf4* | osf5*)
-  version_type=osf
-  need_lib_prefix=no
-  need_version=no
-  soname_spec='${libname}${release}${shared_ext}$major'
-  library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext}$major $libname${shared_ext}'
-  shlibpath_var=LD_LIBRARY_PATH
-  sys_lib_search_path_spec="/usr/shlib /usr/ccs/lib /usr/lib/cmplrs/cc /usr/lib /usr/local/lib /var/shlib"
-  sys_lib_dlsearch_path_spec="$sys_lib_search_path_spec"
-  ;;
-
-rdos*)
-  dynamic_linker=no
-  ;;
-
-solaris*)
-  version_type=linux # correct to gnu/linux during the next big refactor
-  need_lib_prefix=no
-  need_version=no
-  library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext}$major $libname${shared_ext}'
-  soname_spec='${libname}${release}${shared_ext}$major'
-  shlibpath_var=LD_LIBRARY_PATH
-  shlibpath_overrides_runpath=yes
-  hardcode_into_libs=yes
-  # ldd complains unless libraries are executable
-  postinstall_cmds='chmod +x $lib'
-  ;;
-
-sunos4*)
-  version_type=sunos
-  library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${shared_ext}$versuffix'
-  finish_cmds='PATH="\$PATH:/usr/etc" ldconfig $libdir'
-  shlibpath_var=LD_LIBRARY_PATH
-  shlibpath_overrides_runpath=yes
-  if test "$with_gnu_ld" = yes; then
-    need_lib_prefix=no
-  fi
-  need_version=yes
-  ;;
-
-sysv4 | sysv4.3*)
-  version_type=linux # correct to gnu/linux during the next big refactor
-  library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext}$major $libname${shared_ext}'
-  soname_spec='${libname}${release}${shared_ext}$major'
-  shlibpath_var=LD_LIBRARY_PATH
-  case $host_vendor in
-    sni)
-      shlibpath_overrides_runpath=no
-      need_lib_prefix=no
-      runpath_var=LD_RUN_PATH
-      ;;
-    siemens)
-      need_lib_prefix=no
-      ;;
-    motorola)
-      need_lib_prefix=no
-      need_version=no
-      shlibpath_overrides_runpath=no
-      sys_lib_search_path_spec='/lib /usr/lib /usr/ccs/lib'
-      ;;
-  esac
-  ;;
-
-sysv4*MP*)
-  if test -d /usr/nec ;then
-    version_type=linux # correct to gnu/linux during the next big refactor
-    library_names_spec='$libname${shared_ext}.$versuffix $libname${shared_ext}.$major $libname${shared_ext}'
-    soname_spec='$libname${shared_ext}.$major'
-    shlibpath_var=LD_LIBRARY_PATH
-  fi
-  ;;
-
-sysv5* | sco3.2v5* | sco5v6* | unixware* | OpenUNIX* | sysv4*uw2*)
-  version_type=freebsd-elf
-  need_lib_prefix=no
-  need_version=no
-  library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext} $libname${shared_ext}'
-  soname_spec='${libname}${release}${shared_ext}$major'
-  shlibpath_var=LD_LIBRARY_PATH
-  shlibpath_overrides_runpath=yes
-  hardcode_into_libs=yes
-  if test "$with_gnu_ld" = yes; then
-    sys_lib_search_path_spec='/usr/local/lib /usr/gnu/lib /usr/ccs/lib /usr/lib /lib'
-  else
-    sys_lib_search_path_spec='/usr/ccs/lib /usr/lib'
-    case $host_os in
-      sco3.2v5*)
-        sys_lib_search_path_spec="$sys_lib_search_path_spec /lib"
-	;;
-    esac
-  fi
-  sys_lib_dlsearch_path_spec='/usr/lib'
-  ;;
-
-tpf*)
-  # TPF is a cross-target only.  Preferred cross-host = GNU/Linux.
-  version_type=linux # correct to gnu/linux during the next big refactor
-  need_lib_prefix=no
-  need_version=no
-  library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext}$major $libname${shared_ext}'
-  shlibpath_var=LD_LIBRARY_PATH
-  shlibpath_overrides_runpath=no
-  hardcode_into_libs=yes
-  ;;
-
-uts4*)
-  version_type=linux # correct to gnu/linux during the next big refactor
-  library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext}$major $libname${shared_ext}'
-  soname_spec='${libname}${release}${shared_ext}$major'
-  shlibpath_var=LD_LIBRARY_PATH
-  ;;
-
-*)
-  dynamic_linker=no
-  ;;
-esac
-{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $dynamic_linker" >&5
-$as_echo "$dynamic_linker" >&6; }
-test "$dynamic_linker" = no && can_build_shared=no
-
-variables_saved_for_relink="PATH $shlibpath_var $runpath_var"
-if test "$GCC" = yes; then
-  variables_saved_for_relink="$variables_saved_for_relink GCC_EXEC_PREFIX COMPILER_PATH LIBRARY_PATH"
-fi
-
-if test "${lt_cv_sys_lib_search_path_spec+set}" = set; then
-  sys_lib_search_path_spec="$lt_cv_sys_lib_search_path_spec"
-fi
-if test "${lt_cv_sys_lib_dlsearch_path_spec+set}" = set; then
-  sys_lib_dlsearch_path_spec="$lt_cv_sys_lib_dlsearch_path_spec"
-fi
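
# A minimal usage sketch (hypothetical path, not part of the generated
# script): both lt_cv_* cache variables above can be seeded on the
# configure command line to override the detected search paths:
#   ./configure lt_cv_sys_lib_dlsearch_path_spec="/opt/foo/lib /usr/lib"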
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-    { $as_echo "$as_me:${as_lineno-$LINENO}: checking how to hardcode library paths into programs" >&5
-$as_echo_n "checking how to hardcode library paths into programs... " >&6; }
-hardcode_action_FC=
-if test -n "$hardcode_libdir_flag_spec_FC" ||
-   test -n "$runpath_var_FC" ||
-   test "X$hardcode_automatic_FC" = "Xyes" ; then
-
-  # We can hardcode non-existent directories.
-  if test "$hardcode_direct_FC" != no &&
-     # If the only mechanism to avoid hardcoding is shlibpath_var, we
-     # have to relink, otherwise we might link with an installed library
-     # when we should be linking with a yet-to-be-installed one
-     ## test "$_LT_TAGVAR(hardcode_shlibpath_var, FC)" != no &&
-     test "$hardcode_minus_L_FC" != no; then
-    # Linking always hardcodes the temporary library directory.
-    hardcode_action_FC=relink
-  else
-    # We can link without hardcoding, and we can hardcode nonexisting dirs.
-    hardcode_action_FC=immediate
-  fi
-else
-  # We cannot hardcode anything, or else we can only hardcode existing
-  # directories.
-  hardcode_action_FC=unsupported
-fi
-{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $hardcode_action_FC" >&5
-$as_echo "$hardcode_action_FC" >&6; }
-
-if test "$hardcode_action_FC" = relink ||
-   test "$inherit_rpath_FC" = yes; then
-  # Fast installation is not supported
-  enable_fast_install=no
-elif test "$shlibpath_overrides_runpath" = yes ||
-     test "$enable_shared" = no; then
-  # Fast installation is not necessary
-  enable_fast_install=needless
-fi
-
-
-
-
-
-
-
-  fi # test -n "$compiler"
-
-  GCC=$lt_save_GCC
-  CC=$lt_save_CC
-  CFLAGS=$lt_save_CFLAGS
-fi # test "$_lt_disable_FC" != yes
-
-ac_ext=c
-ac_cpp='$CPP $CPPFLAGS'
-ac_compile='$CC -c $CFLAGS $CPPFLAGS conftest.$ac_ext >&5'
-ac_link='$CC -o conftest$ac_exeext $CFLAGS $CPPFLAGS $LDFLAGS conftest.$ac_ext $LIBS >&5'
-ac_compiler_gnu=$ac_cv_c_compiler_gnu
-
-
-
-
-
-
-
-
-
-
-
-        ac_config_commands="$ac_config_commands libtool"
-
-
-
-
-# Only expand once:
-
-
-
-ac_config_files="$ac_config_files Makefile lib/Makefile lib/xmlFailures/Makefile lib/xmlSuccesses/Makefile prog/Makefile"
-
-cat >confcache <<\_ACEOF
-# This file is a shell script that caches the results of configure
-# tests run on this system so they can be shared between configure
-# scripts and configure runs, see configure's option --config-cache.
-# It is not useful on other systems.  If it contains results you don't
-# want to keep, you may remove or edit it.
-#
-# config.status only pays attention to the cache file if you give it
-# the --recheck option to rerun configure.
-#
-# `ac_cv_env_foo' variables (set or unset) will be overridden when
-# loading this file, other *unset* `ac_cv_foo' will be assigned the
-# following values.
-
-_ACEOF
-
-# The following way of writing the cache mishandles newlines in values,
-# but we know of no workaround that is simple, portable, and efficient.
-# So, we kill variables containing newlines.
-# Ultrix sh set writes to stderr and can't be redirected directly,
-# and sets the high bit in the cache file unless we assign to the vars.
-(
-  for ac_var in `(set) 2>&1 | sed -n 's/^\([a-zA-Z_][a-zA-Z0-9_]*\)=.*/\1/p'`; do
-    eval ac_val=\$$ac_var
-    case $ac_val in #(
-    *${as_nl}*)
-      case $ac_var in #(
-      *_cv_*) { $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: cache variable $ac_var contains a newline" >&5
-$as_echo "$as_me: WARNING: cache variable $ac_var contains a newline" >&2;} ;;
-      esac
-      case $ac_var in #(
-      _ | IFS | as_nl) ;; #(
-      BASH_ARGV | BASH_SOURCE) eval $ac_var= ;; #(
-      *) { eval $ac_var=; unset $ac_var;} ;;
-      esac ;;
-    esac
-  done
-
-  (set) 2>&1 |
-    case $as_nl`(ac_space=' '; set) 2>&1` in #(
-    *${as_nl}ac_space=\ *)
-      # `set' does not quote correctly, so add quotes: double-quote
-      # substitution turns \\\\ into \\, and sed turns \\ into \.
-      sed -n \
-	"s/'/'\\\\''/g;
-	  s/^\\([_$as_cr_alnum]*_cv_[_$as_cr_alnum]*\\)=\\(.*\\)/\\1='\\2'/p"
-      ;; #(
-    *)
-      # `set' quotes correctly as required by POSIX, so do not add quotes.
-      sed -n "/^[_$as_cr_alnum]*_cv_[_$as_cr_alnum]*=/p"
-      ;;
-    esac |
-    sort
-) |
-  sed '
-     /^ac_cv_env_/b end
-     t clear
-     :clear
-     s/^\([^=]*\)=\(.*[{}].*\)$/test "${\1+set}" = set || &/
-     t end
-     s/^\([^=]*\)=\(.*\)$/\1=${\1=\2}/
-     :end' >>confcache
-if diff "$cache_file" confcache >/dev/null 2>&1; then :; else
-  if test -w "$cache_file"; then
-    if test "x$cache_file" != "x/dev/null"; then
-      { $as_echo "$as_me:${as_lineno-$LINENO}: updating cache $cache_file" >&5
-$as_echo "$as_me: updating cache $cache_file" >&6;}
-      if test ! -f "$cache_file" || test -h "$cache_file"; then
-	cat confcache >"$cache_file"
-      else
-        case $cache_file in #(
-        */* | ?:*)
-	  mv -f confcache "$cache_file"$$ &&
-	  mv -f "$cache_file"$$ "$cache_file" ;; #(
-        *)
-	  mv -f confcache "$cache_file" ;;
-	esac
-      fi
-    fi
-  else
-    { $as_echo "$as_me:${as_lineno-$LINENO}: not updating unwritable cache $cache_file" >&5
-$as_echo "$as_me: not updating unwritable cache $cache_file" >&6;}
-  fi
-fi
-rm -f confcache
-
-test "x$prefix" = xNONE && prefix=$ac_default_prefix
-# Let make expand exec_prefix.
-test "x$exec_prefix" = xNONE && exec_prefix='${prefix}'
-
-DEFS=-DHAVE_CONFIG_H
-
-ac_libobjs=
-ac_ltlibobjs=
-U=
-for ac_i in : $LIBOBJS; do test "x$ac_i" = x: && continue
-  # 1. Remove the extension, and $U if already installed.
-  ac_script='s/\$U\././;s/\.o$//;s/\.obj$//'
-  ac_i=`$as_echo "$ac_i" | sed "$ac_script"`
-  # 2. Prepend LIBOBJDIR.  When used with automake>=1.10 LIBOBJDIR
-  #    will be set to the directory where LIBOBJS objects are built.
-  as_fn_append ac_libobjs " \${LIBOBJDIR}$ac_i\$U.$ac_objext"
-  as_fn_append ac_ltlibobjs " \${LIBOBJDIR}$ac_i"'$U.lo'
-done
-LIBOBJS=$ac_libobjs
-
-LTLIBOBJS=$ac_ltlibobjs
-
-
- if test -n "$EXEEXT"; then
-  am__EXEEXT_TRUE=
-  am__EXEEXT_FALSE='#'
-else
-  am__EXEEXT_TRUE='#'
-  am__EXEEXT_FALSE=
-fi
-
-if test -z "${DEBUG_TRUE}" && test -z "${DEBUG_FALSE}"; then
-  as_fn_error $? "conditional \"DEBUG\" was never defined.
-Usually this means the macro was only invoked conditionally." "$LINENO" 5
-fi
-if test -z "${ENABLE_UDUNITS_1_TRUE}" && test -z "${ENABLE_UDUNITS_1_FALSE}"; then
-  as_fn_error $? "conditional \"ENABLE_UDUNITS_1\" was never defined.
-Usually this means the macro was only invoked conditionally." "$LINENO" 5
-fi
-if test -z "${ENABLE_UDUNITS_1_TRUE}" && test -z "${ENABLE_UDUNITS_1_FALSE}"; then
-  as_fn_error $? "conditional \"ENABLE_UDUNITS_1\" was never defined.
-Usually this means the macro was only invoked conditionally." "$LINENO" 5
-fi
-if test -z "${AMDEP_TRUE}" && test -z "${AMDEP_FALSE}"; then
-  as_fn_error $? "conditional \"AMDEP\" was never defined.
-Usually this means the macro was only invoked conditionally." "$LINENO" 5
-fi
-if test -z "${am__fastdepCC_TRUE}" && test -z "${am__fastdepCC_FALSE}"; then
-  as_fn_error $? "conditional \"am__fastdepCC\" was never defined.
-Usually this means the macro was only invoked conditionally." "$LINENO" 5
-fi
-if test -z "${HAVE_CUNIT_TRUE}" && test -z "${HAVE_CUNIT_FALSE}"; then
-  as_fn_error $? "conditional \"HAVE_CUNIT\" was never defined.
-Usually this means the macro was only invoked conditionally." "$LINENO" 5
-fi
-
-: "${CONFIG_STATUS=./config.status}"
-ac_write_fail=0
-ac_clean_files_save=$ac_clean_files
-ac_clean_files="$ac_clean_files $CONFIG_STATUS"
-{ $as_echo "$as_me:${as_lineno-$LINENO}: creating $CONFIG_STATUS" >&5
-$as_echo "$as_me: creating $CONFIG_STATUS" >&6;}
-as_write_fail=0
-cat >$CONFIG_STATUS <<_ASEOF || as_write_fail=1
-#! $SHELL
-# Generated by $as_me.
-# Run this file to recreate the current configuration.
-# Compiler output produced by configure, useful for debugging
-# configure, is in config.log if it exists.
-
-debug=false
-ac_cs_recheck=false
-ac_cs_silent=false
-
-SHELL=\${CONFIG_SHELL-$SHELL}
-export SHELL
-_ASEOF
-cat >>$CONFIG_STATUS <<\_ASEOF || as_write_fail=1
-## -------------------- ##
-## M4sh Initialization. ##
-## -------------------- ##
-
-# Be more Bourne compatible
-DUALCASE=1; export DUALCASE # for MKS sh
-if test -n "${ZSH_VERSION+set}" && (emulate sh) >/dev/null 2>&1; then :
-  emulate sh
-  NULLCMD=:
-  # Pre-4.2 versions of Zsh do word splitting on ${1+"$@"}, which
-  # is contrary to our usage.  Disable this feature.
-  alias -g '${1+"$@"}'='"$@"'
-  setopt NO_GLOB_SUBST
-else
-  case `(set -o) 2>/dev/null` in #(
-  *posix*) :
-    set -o posix ;; #(
-  *) :
-     ;;
-esac
-fi
-
-
-as_nl='
-'
-export as_nl
-# Printing a long string crashes Solaris 7 /usr/bin/printf.
-as_echo='\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\'
-as_echo=$as_echo$as_echo$as_echo$as_echo$as_echo
-as_echo=$as_echo$as_echo$as_echo$as_echo$as_echo$as_echo
-# Prefer a ksh shell builtin over an external printf program on Solaris,
-# but without wasting forks for bash or zsh.
-if test -z "$BASH_VERSION$ZSH_VERSION" \
-    && (test "X`print -r -- $as_echo`" = "X$as_echo") 2>/dev/null; then
-  as_echo='print -r --'
-  as_echo_n='print -rn --'
-elif (test "X`printf %s $as_echo`" = "X$as_echo") 2>/dev/null; then
-  as_echo='printf %s\n'
-  as_echo_n='printf %s'
-else
-  if test "X`(/usr/ucb/echo -n -n $as_echo) 2>/dev/null`" = "X-n $as_echo"; then
-    as_echo_body='eval /usr/ucb/echo -n "$1$as_nl"'
-    as_echo_n='/usr/ucb/echo -n'
-  else
-    as_echo_body='eval expr "X$1" : "X\\(.*\\)"'
-    as_echo_n_body='eval
-      arg=$1;
-      case $arg in #(
-      *"$as_nl"*)
-	expr "X$arg" : "X\\(.*\\)$as_nl";
-	arg=`expr "X$arg" : ".*$as_nl\\(.*\\)"`;;
-      esac;
-      expr "X$arg" : "X\\(.*\\)" | tr -d "$as_nl"
-    '
-    export as_echo_n_body
-    as_echo_n='sh -c $as_echo_n_body as_echo'
-  fi
-  export as_echo_body
-  as_echo='sh -c $as_echo_body as_echo'
-fi
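
# A minimal usage sketch (hypothetical string): whichever implementation
# was selected above, $as_echo prints its argument verbatim, including a
# leading dash that a raw `echo' could interpret as an option.
$as_echo "-n is printed, not interpreted"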
-
-# The user is always right.
-if test "${PATH_SEPARATOR+set}" != set; then
-  PATH_SEPARATOR=:
-  (PATH='/bin;/bin'; FPATH=$PATH; sh -c :) >/dev/null 2>&1 && {
-    (PATH='/bin:/bin'; FPATH=$PATH; sh -c :) >/dev/null 2>&1 ||
-      PATH_SEPARATOR=';'
-  }
-fi
-
-
-# IFS
-# We need space, tab and new line, in precisely that order.  Quoting is
-# there to prevent editors from complaining about space-tab.
-# (If _AS_PATH_WALK were called with IFS unset, it would disable word
-# splitting by setting IFS to empty value.)
-IFS=" ""	$as_nl"
-
-# Find who we are.  Look in the path if we contain no directory separator.
-as_myself=
-case $0 in #((
-  *[\\/]* ) as_myself=$0 ;;
-  *) as_save_IFS=$IFS; IFS=$PATH_SEPARATOR
-for as_dir in $PATH
-do
-  IFS=$as_save_IFS
-  test -z "$as_dir" && as_dir=.
-    test -r "$as_dir/$0" && as_myself=$as_dir/$0 && break
-  done
-IFS=$as_save_IFS
-
-     ;;
-esac
-# We did not find ourselves, most probably we were run as `sh COMMAND'
-# in which case we are not to be found in the path.
-if test "x$as_myself" = x; then
-  as_myself=$0
-fi
-if test ! -f "$as_myself"; then
-  $as_echo "$as_myself: error: cannot find myself; rerun with an absolute file name" >&2
-  exit 1
-fi
-
-# Unset variables that we do not need and which cause bugs (e.g. in
-# pre-3.0 UWIN ksh).  But do not cause bugs in bash 2.01; the "|| exit 1"
-# suppresses any "Segmentation fault" message there.  '((' could
-# trigger a bug in pdksh 5.2.14.
-for as_var in BASH_ENV ENV MAIL MAILPATH
-do eval test x\${$as_var+set} = xset \
-  && ( (unset $as_var) || exit 1) >/dev/null 2>&1 && unset $as_var || :
-done
-PS1='$ '
-PS2='> '
-PS4='+ '
-
-# NLS nuisances.
-LC_ALL=C
-export LC_ALL
-LANGUAGE=C
-export LANGUAGE
-
-# CDPATH.
-(unset CDPATH) >/dev/null 2>&1 && unset CDPATH
-
-
-# as_fn_error STATUS ERROR [LINENO LOG_FD]
-# ----------------------------------------
-# Output "`basename $0`: error: ERROR" to stderr. If LINENO and LOG_FD are
-# provided, also output the error to LOG_FD, referencing LINENO. Then exit the
-# script with STATUS, using 1 if that was 0.
-as_fn_error ()
-{
-  as_status=$1; test $as_status -eq 0 && as_status=1
-  if test "$4"; then
-    as_lineno=${as_lineno-"$3"} as_lineno_stack=as_lineno_stack=$as_lineno_stack
-    $as_echo "$as_me:${as_lineno-$LINENO}: error: $2" >&$4
-  fi
-  $as_echo "$as_me: error: $2" >&2
-  as_fn_exit $as_status
-} # as_fn_error
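
# A minimal usage sketch (hypothetical message): guard a precondition and
# fail through the helper, which guarantees a nonzero exit status.
test -w . || as_fn_error 1 "build directory is not writable"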
-
-
-# as_fn_set_status STATUS
-# -----------------------
-# Set $? to STATUS, without forking.
-as_fn_set_status ()
-{
-  return $1
-} # as_fn_set_status
-
-# as_fn_exit STATUS
-# -----------------
-# Exit the shell with STATUS, even in a "trap 0" or "set -e" context.
-as_fn_exit ()
-{
-  set +e
-  as_fn_set_status $1
-  exit $1
-} # as_fn_exit
-
-# as_fn_unset VAR
-# ---------------
-# Portably unset VAR.
-as_fn_unset ()
-{
-  { eval $1=; unset $1;}
-}
-as_unset=as_fn_unset
-# as_fn_append VAR VALUE
-# ----------------------
-# Append the text in VALUE to the end of the definition contained in VAR. Take
-# advantage of any shell optimizations that allow amortized linear growth over
-# repeated appends, instead of the typical quadratic growth present in naive
-# implementations.
-if (eval "as_var=1; as_var+=2; test x\$as_var = x12") 2>/dev/null; then :
-  eval 'as_fn_append ()
-  {
-    eval $1+=\$2
-  }'
-else
-  as_fn_append ()
-  {
-    eval $1=\$$1\$2
-  }
-fi # as_fn_append
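
# A minimal usage sketch (hypothetical flags): repeated appends through
# as_fn_append stay roughly linear on shells that support `+=', instead
# of re-copying the whole string on every iteration.
acc=
for flag in -L/usr/lib -lm -lc; do
  as_fn_append acc " $flag"
done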
-
-# as_fn_arith ARG...
-# ------------------
-# Perform arithmetic evaluation on the ARGs, and store the result in the
-# global $as_val. Take advantage of shells that can avoid forks. The arguments
-# must be portable across $(()) and expr.
-if (eval "test \$(( 1 + 1 )) = 2") 2>/dev/null; then :
-  eval 'as_fn_arith ()
-  {
-    as_val=$(( $* ))
-  }'
-else
-  as_fn_arith ()
-  {
-    as_val=`expr "$@" || test $? -eq 1`
-  }
-fi # as_fn_arith
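
# A minimal usage sketch: the `*' is escaped so the same arguments work
# for both the $(( )) and `expr' implementations; the result is in $as_val.
as_fn_arith 3 \* 7
$as_echo "$as_val"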
-
-
-if expr a : '\(a\)' >/dev/null 2>&1 &&
-   test "X`expr 00001 : '.*\(...\)'`" = X001; then
-  as_expr=expr
-else
-  as_expr=false
-fi
-
-if (basename -- /) >/dev/null 2>&1 && test "X`basename -- / 2>&1`" = "X/"; then
-  as_basename=basename
-else
-  as_basename=false
-fi
-
-if (as_dir=`dirname -- /` && test "X$as_dir" = X/) >/dev/null 2>&1; then
-  as_dirname=dirname
-else
-  as_dirname=false
-fi
-
-as_me=`$as_basename -- "$0" ||
-$as_expr X/"$0" : '.*/\([^/][^/]*\)/*$' \| \
-	 X"$0" : 'X\(//\)$' \| \
-	 X"$0" : 'X\(/\)' \| . 2>/dev/null ||
-$as_echo X/"$0" |
-    sed '/^.*\/\([^/][^/]*\)\/*$/{
-	    s//\1/
-	    q
-	  }
-	  /^X\/\(\/\/\)$/{
-	    s//\1/
-	    q
-	  }
-	  /^X\/\(\/\).*/{
-	    s//\1/
-	    q
-	  }
-	  s/.*/./; q'`
-
-# Avoid depending upon Character Ranges.
-as_cr_letters='abcdefghijklmnopqrstuvwxyz'
-as_cr_LETTERS='ABCDEFGHIJKLMNOPQRSTUVWXYZ'
-as_cr_Letters=$as_cr_letters$as_cr_LETTERS
-as_cr_digits='0123456789'
-as_cr_alnum=$as_cr_Letters$as_cr_digits
-
-ECHO_C= ECHO_N= ECHO_T=
-case `echo -n x` in #(((((
--n*)
-  case `echo 'xy\c'` in
-  *c*) ECHO_T='	';;	# ECHO_T is single tab character.
-  xy)  ECHO_C='\c';;
-  *)   echo `echo ksh88 bug on AIX 6.1` > /dev/null
-       ECHO_T='	';;
-  esac;;
-*)
-  ECHO_N='-n';;
-esac
-
-rm -f conf$$ conf$$.exe conf$$.file
-if test -d conf$$.dir; then
-  rm -f conf$$.dir/conf$$.file
-else
-  rm -f conf$$.dir
-  mkdir conf$$.dir 2>/dev/null
-fi
-if (echo >conf$$.file) 2>/dev/null; then
-  if ln -s conf$$.file conf$$ 2>/dev/null; then
-    as_ln_s='ln -s'
-    # ... but there are two gotchas:
-    # 1) On MSYS, both `ln -s file dir' and `ln file dir' fail.
-    # 2) DJGPP < 2.04 has no symlinks; `ln -s' creates a wrapper executable.
-    # In both cases, we have to default to `cp -p'.
-    ln -s conf$$.file conf$$.dir 2>/dev/null && test ! -f conf$$.exe ||
-      as_ln_s='cp -p'
-  elif ln conf$$.file conf$$ 2>/dev/null; then
-    as_ln_s=ln
-  else
-    as_ln_s='cp -p'
-  fi
-else
-  as_ln_s='cp -p'
-fi
-rm -f conf$$ conf$$.exe conf$$.dir/conf$$.file conf$$.file
-rmdir conf$$.dir 2>/dev/null
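
# A minimal usage sketch (hypothetical file names): callers use $as_ln_s
# without caring whether it resolved to `ln -s', `ln', or `cp -p'.
$as_ln_s "$srcdir/COPYING" COPYING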
-
-
-# as_fn_mkdir_p
-# -------------
-# Create "$as_dir" as a directory, including parents if necessary.
-as_fn_mkdir_p ()
-{
-
-  case $as_dir in #(
-  -*) as_dir=./$as_dir;;
-  esac
-  test -d "$as_dir" || eval $as_mkdir_p || {
-    as_dirs=
-    while :; do
-      case $as_dir in #(
-      *\'*) as_qdir=`$as_echo "$as_dir" | sed "s/'/'\\\\\\\\''/g"`;; #'(
-      *) as_qdir=$as_dir;;
-      esac
-      as_dirs="'$as_qdir' $as_dirs"
-      as_dir=`$as_dirname -- "$as_dir" ||
-$as_expr X"$as_dir" : 'X\(.*[^/]\)//*[^/][^/]*/*$' \| \
-	 X"$as_dir" : 'X\(//\)[^/]' \| \
-	 X"$as_dir" : 'X\(//\)$' \| \
-	 X"$as_dir" : 'X\(/\)' \| . 2>/dev/null ||
-$as_echo X"$as_dir" |
-    sed '/^X\(.*[^/]\)\/\/*[^/][^/]*\/*$/{
-	    s//\1/
-	    q
-	  }
-	  /^X\(\/\/\)[^/].*/{
-	    s//\1/
-	    q
-	  }
-	  /^X\(\/\/\)$/{
-	    s//\1/
-	    q
-	  }
-	  /^X\(\/\).*/{
-	    s//\1/
-	    q
-	  }
-	  s/.*/./; q'`
-      test -d "$as_dir" && break
-    done
-    test -z "$as_dirs" || eval "mkdir $as_dirs"
-  } || test -d "$as_dir" || as_fn_error $? "cannot create directory $as_dir"
-
-
-} # as_fn_mkdir_p
-if mkdir -p . 2>/dev/null; then
-  as_mkdir_p='mkdir -p "$as_dir"'
-else
-  test -d ./-p && rmdir ./-p
-  as_mkdir_p=false
-fi
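
# A minimal usage sketch (hypothetical path): the helper takes its
# argument via $as_dir, exactly as the CONFIG_FILES loop below calls it.
as_dir=sub/dir/deep; as_fn_mkdir_p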
-
-if test -x / >/dev/null 2>&1; then
-  as_test_x='test -x'
-else
-  if ls -dL / >/dev/null 2>&1; then
-    as_ls_L_option=L
-  else
-    as_ls_L_option=
-  fi
-  as_test_x='
-    eval sh -c '\''
-      if test -d "$1"; then
-	test -d "$1/.";
-      else
-	case $1 in #(
-	-*)set "./$1";;
-	esac;
-	case `ls -ld'$as_ls_L_option' "$1" 2>/dev/null` in #((
-	???[sx]*):;;*)false;;esac;fi
-    '\'' sh
-  '
-fi
-as_executable_p=$as_test_x
-
-# Sed expression to map a string onto a valid CPP name.
-as_tr_cpp="eval sed 'y%*$as_cr_letters%P$as_cr_LETTERS%;s%[^_$as_cr_alnum]%_%g'"
-
-# Sed expression to map a string onto a valid variable name.
-as_tr_sh="eval sed 'y%*+%pp%;s%[^_$as_cr_alnum]%_%g'"
-
-
-exec 6>&1
-## ----------------------------------- ##
-## Main body of $CONFIG_STATUS script. ##
-## ----------------------------------- ##
-_ASEOF
-test $as_write_fail = 0 && chmod +x $CONFIG_STATUS || ac_write_fail=1
-
-cat >>$CONFIG_STATUS <<\_ACEOF || ac_write_fail=1
-# Save the log message, to keep $0 and so on meaningful, and to
-# report actual input values of CONFIG_FILES etc. instead of their
-# values after options handling.
-ac_log="
-This file was extended by UDUNITS $as_me 2.2.17, which was
-generated by GNU Autoconf 2.68.  Invocation command line was
-
-  CONFIG_FILES    = $CONFIG_FILES
-  CONFIG_HEADERS  = $CONFIG_HEADERS
-  CONFIG_LINKS    = $CONFIG_LINKS
-  CONFIG_COMMANDS = $CONFIG_COMMANDS
-  $ $0 $@
-
-on `(hostname || uname -n) 2>/dev/null | sed 1q`
-"
-
-_ACEOF
-
-case $ac_config_files in *"
-"*) set x $ac_config_files; shift; ac_config_files=$*;;
-esac
-
-case $ac_config_headers in *"
-"*) set x $ac_config_headers; shift; ac_config_headers=$*;;
-esac
-
-
-cat >>$CONFIG_STATUS <<_ACEOF || ac_write_fail=1
-# Files that config.status was made for.
-config_files="$ac_config_files"
-config_headers="$ac_config_headers"
-config_commands="$ac_config_commands"
-
-_ACEOF
-
-cat >>$CONFIG_STATUS <<\_ACEOF || ac_write_fail=1
-ac_cs_usage="\
-\`$as_me' instantiates files and other configuration actions
-from templates according to the current configuration.  Unless the files
-and actions are specified as TAGs, all are instantiated by default.
-
-Usage: $0 [OPTION]... [TAG]...
-
-  -h, --help       print this help, then exit
-  -V, --version    print version number and configuration settings, then exit
-      --config     print configuration, then exit
-  -q, --quiet, --silent
-                   do not print progress messages
-  -d, --debug      don't remove temporary files
-      --recheck    update $as_me by reconfiguring in the same conditions
-      --file=FILE[:TEMPLATE]
-                   instantiate the configuration file FILE
-      --header=FILE[:TEMPLATE]
-                   instantiate the configuration header FILE
-
-Configuration files:
-$config_files
-
-Configuration headers:
-$config_headers
-
-Configuration commands:
-$config_commands
-
-Report bugs to <support-udunits@unidata.ucar.edu>."
-
-_ACEOF
-cat >>$CONFIG_STATUS <<_ACEOF || ac_write_fail=1
-ac_cs_config="`$as_echo "$ac_configure_args" | sed 's/^ //; s/[\\""\`\$]/\\\\&/g'`"
-ac_cs_version="\\
-UDUNITS config.status 2.2.17
-configured by $0, generated by GNU Autoconf 2.68,
-  with options \\"\$ac_cs_config\\"
-
-Copyright (C) 2010 Free Software Foundation, Inc.
-This config.status script is free software; the Free Software Foundation
-gives unlimited permission to copy, distribute and modify it."
-
-ac_pwd='$ac_pwd'
-srcdir='$srcdir'
-INSTALL='$INSTALL'
-MKDIR_P='$MKDIR_P'
-AWK='$AWK'
-test -n "\$AWK" || AWK=awk
-_ACEOF
-
-cat >>$CONFIG_STATUS <<\_ACEOF || ac_write_fail=1
-# The default lists apply if the user does not specify any file.
-ac_need_defaults=:
-while test $# != 0
-do
-  case $1 in
-  --*=?*)
-    ac_option=`expr "X$1" : 'X\([^=]*\)='`
-    ac_optarg=`expr "X$1" : 'X[^=]*=\(.*\)'`
-    ac_shift=:
-    ;;
-  --*=)
-    ac_option=`expr "X$1" : 'X\([^=]*\)='`
-    ac_optarg=
-    ac_shift=:
-    ;;
-  *)
-    ac_option=$1
-    ac_optarg=$2
-    ac_shift=shift
-    ;;
-  esac
-
-  case $ac_option in
-  # Handling of the options.
-  -recheck | --recheck | --rechec | --reche | --rech | --rec | --re | --r)
-    ac_cs_recheck=: ;;
-  --version | --versio | --versi | --vers | --ver | --ve | --v | -V )
-    $as_echo "$ac_cs_version"; exit ;;
-  --config | --confi | --conf | --con | --co | --c )
-    $as_echo "$ac_cs_config"; exit ;;
-  --debug | --debu | --deb | --de | --d | -d )
-    debug=: ;;
-  --file | --fil | --fi | --f )
-    $ac_shift
-    case $ac_optarg in
-    *\'*) ac_optarg=`$as_echo "$ac_optarg" | sed "s/'/'\\\\\\\\''/g"` ;;
-    '') as_fn_error $? "missing file argument" ;;
-    esac
-    as_fn_append CONFIG_FILES " '$ac_optarg'"
-    ac_need_defaults=false;;
-  --header | --heade | --head | --hea )
-    $ac_shift
-    case $ac_optarg in
-    *\'*) ac_optarg=`$as_echo "$ac_optarg" | sed "s/'/'\\\\\\\\''/g"` ;;
-    esac
-    as_fn_append CONFIG_HEADERS " '$ac_optarg'"
-    ac_need_defaults=false;;
-  --he | --h)
-    # Conflict between --help and --header
-    as_fn_error $? "ambiguous option: \`$1'
-Try \`$0 --help' for more information.";;
-  --help | --hel | -h )
-    $as_echo "$ac_cs_usage"; exit ;;
-  -q | -quiet | --quiet | --quie | --qui | --qu | --q \
-  | -silent | --silent | --silen | --sile | --sil | --si | --s)
-    ac_cs_silent=: ;;
-
-  # This is an error.
-  -*) as_fn_error $? "unrecognized option: \`$1'
-Try \`$0 --help' for more information." ;;
-
-  *) as_fn_append ac_config_targets " $1"
-     ac_need_defaults=false ;;
-
-  esac
-  shift
-done
-
-ac_configure_extra_args=
-
-if $ac_cs_silent; then
-  exec 6>/dev/null
-  ac_configure_extra_args="$ac_configure_extra_args --silent"
-fi
-
-_ACEOF
-cat >>$CONFIG_STATUS <<_ACEOF || ac_write_fail=1
-if \$ac_cs_recheck; then
-  set X '$SHELL' '$0' $ac_configure_args \$ac_configure_extra_args --no-create --no-recursion
-  shift
-  \$as_echo "running CONFIG_SHELL=$SHELL \$*" >&6
-  CONFIG_SHELL='$SHELL'
-  export CONFIG_SHELL
-  exec "\$@"
-fi
-
-_ACEOF
-cat >>$CONFIG_STATUS <<\_ACEOF || ac_write_fail=1
-exec 5>>config.log
-{
-  echo
-  sed 'h;s/./-/g;s/^.../## /;s/...$/ ##/;p;x;p;x' <<_ASBOX
-## Running $as_me. ##
-_ASBOX
-  $as_echo "$ac_log"
-} >&5
-
-_ACEOF
-cat >>$CONFIG_STATUS <<_ACEOF || ac_write_fail=1
-#
-# INIT-COMMANDS
-#
-AMDEP_TRUE="$AMDEP_TRUE" ac_aux_dir="$ac_aux_dir"
-
-
-# The HP-UX ksh and POSIX shell print the target directory to stdout
-# if CDPATH is set.
-(unset CDPATH) >/dev/null 2>&1 && unset CDPATH
-
-sed_quote_subst='$sed_quote_subst'
-double_quote_subst='$double_quote_subst'
-delay_variable_subst='$delay_variable_subst'
-macro_version='`$ECHO "$macro_version" | $SED "$delay_single_quote_subst"`'
-macro_revision='`$ECHO "$macro_revision" | $SED "$delay_single_quote_subst"`'
-enable_shared='`$ECHO "$enable_shared" | $SED "$delay_single_quote_subst"`'
-enable_static='`$ECHO "$enable_static" | $SED "$delay_single_quote_subst"`'
-pic_mode='`$ECHO "$pic_mode" | $SED "$delay_single_quote_subst"`'
-enable_fast_install='`$ECHO "$enable_fast_install" | $SED "$delay_single_quote_subst"`'
-SHELL='`$ECHO "$SHELL" | $SED "$delay_single_quote_subst"`'
-ECHO='`$ECHO "$ECHO" | $SED "$delay_single_quote_subst"`'
-PATH_SEPARATOR='`$ECHO "$PATH_SEPARATOR" | $SED "$delay_single_quote_subst"`'
-host_alias='`$ECHO "$host_alias" | $SED "$delay_single_quote_subst"`'
-host='`$ECHO "$host" | $SED "$delay_single_quote_subst"`'
-host_os='`$ECHO "$host_os" | $SED "$delay_single_quote_subst"`'
-build_alias='`$ECHO "$build_alias" | $SED "$delay_single_quote_subst"`'
-build='`$ECHO "$build" | $SED "$delay_single_quote_subst"`'
-build_os='`$ECHO "$build_os" | $SED "$delay_single_quote_subst"`'
-SED='`$ECHO "$SED" | $SED "$delay_single_quote_subst"`'
-Xsed='`$ECHO "$Xsed" | $SED "$delay_single_quote_subst"`'
-GREP='`$ECHO "$GREP" | $SED "$delay_single_quote_subst"`'
-EGREP='`$ECHO "$EGREP" | $SED "$delay_single_quote_subst"`'
-FGREP='`$ECHO "$FGREP" | $SED "$delay_single_quote_subst"`'
-LD='`$ECHO "$LD" | $SED "$delay_single_quote_subst"`'
-NM='`$ECHO "$NM" | $SED "$delay_single_quote_subst"`'
-LN_S='`$ECHO "$LN_S" | $SED "$delay_single_quote_subst"`'
-max_cmd_len='`$ECHO "$max_cmd_len" | $SED "$delay_single_quote_subst"`'
-ac_objext='`$ECHO "$ac_objext" | $SED "$delay_single_quote_subst"`'
-exeext='`$ECHO "$exeext" | $SED "$delay_single_quote_subst"`'
-lt_unset='`$ECHO "$lt_unset" | $SED "$delay_single_quote_subst"`'
-lt_SP2NL='`$ECHO "$lt_SP2NL" | $SED "$delay_single_quote_subst"`'
-lt_NL2SP='`$ECHO "$lt_NL2SP" | $SED "$delay_single_quote_subst"`'
-lt_cv_to_host_file_cmd='`$ECHO "$lt_cv_to_host_file_cmd" | $SED "$delay_single_quote_subst"`'
-lt_cv_to_tool_file_cmd='`$ECHO "$lt_cv_to_tool_file_cmd" | $SED "$delay_single_quote_subst"`'
-reload_flag='`$ECHO "$reload_flag" | $SED "$delay_single_quote_subst"`'
-reload_cmds='`$ECHO "$reload_cmds" | $SED "$delay_single_quote_subst"`'
-OBJDUMP='`$ECHO "$OBJDUMP" | $SED "$delay_single_quote_subst"`'
-deplibs_check_method='`$ECHO "$deplibs_check_method" | $SED "$delay_single_quote_subst"`'
-file_magic_cmd='`$ECHO "$file_magic_cmd" | $SED "$delay_single_quote_subst"`'
-file_magic_glob='`$ECHO "$file_magic_glob" | $SED "$delay_single_quote_subst"`'
-want_nocaseglob='`$ECHO "$want_nocaseglob" | $SED "$delay_single_quote_subst"`'
-DLLTOOL='`$ECHO "$DLLTOOL" | $SED "$delay_single_quote_subst"`'
-sharedlib_from_linklib_cmd='`$ECHO "$sharedlib_from_linklib_cmd" | $SED "$delay_single_quote_subst"`'
-AR='`$ECHO "$AR" | $SED "$delay_single_quote_subst"`'
-AR_FLAGS='`$ECHO "$AR_FLAGS" | $SED "$delay_single_quote_subst"`'
-archiver_list_spec='`$ECHO "$archiver_list_spec" | $SED "$delay_single_quote_subst"`'
-STRIP='`$ECHO "$STRIP" | $SED "$delay_single_quote_subst"`'
-RANLIB='`$ECHO "$RANLIB" | $SED "$delay_single_quote_subst"`'
-old_postinstall_cmds='`$ECHO "$old_postinstall_cmds" | $SED "$delay_single_quote_subst"`'
-old_postuninstall_cmds='`$ECHO "$old_postuninstall_cmds" | $SED "$delay_single_quote_subst"`'
-old_archive_cmds='`$ECHO "$old_archive_cmds" | $SED "$delay_single_quote_subst"`'
-lock_old_archive_extraction='`$ECHO "$lock_old_archive_extraction" | $SED "$delay_single_quote_subst"`'
-CC='`$ECHO "$CC" | $SED "$delay_single_quote_subst"`'
-CFLAGS='`$ECHO "$CFLAGS" | $SED "$delay_single_quote_subst"`'
-compiler='`$ECHO "$compiler" | $SED "$delay_single_quote_subst"`'
-GCC='`$ECHO "$GCC" | $SED "$delay_single_quote_subst"`'
-lt_cv_sys_global_symbol_pipe='`$ECHO "$lt_cv_sys_global_symbol_pipe" | $SED "$delay_single_quote_subst"`'
-lt_cv_sys_global_symbol_to_cdecl='`$ECHO "$lt_cv_sys_global_symbol_to_cdecl" | $SED "$delay_single_quote_subst"`'
-lt_cv_sys_global_symbol_to_c_name_address='`$ECHO "$lt_cv_sys_global_symbol_to_c_name_address" | $SED "$delay_single_quote_subst"`'
-lt_cv_sys_global_symbol_to_c_name_address_lib_prefix='`$ECHO "$lt_cv_sys_global_symbol_to_c_name_address_lib_prefix" | $SED "$delay_single_quote_subst"`'
-nm_file_list_spec='`$ECHO "$nm_file_list_spec" | $SED "$delay_single_quote_subst"`'
-lt_sysroot='`$ECHO "$lt_sysroot" | $SED "$delay_single_quote_subst"`'
-objdir='`$ECHO "$objdir" | $SED "$delay_single_quote_subst"`'
-MAGIC_CMD='`$ECHO "$MAGIC_CMD" | $SED "$delay_single_quote_subst"`'
-lt_prog_compiler_no_builtin_flag='`$ECHO "$lt_prog_compiler_no_builtin_flag" | $SED "$delay_single_quote_subst"`'
-lt_prog_compiler_pic='`$ECHO "$lt_prog_compiler_pic" | $SED "$delay_single_quote_subst"`'
-lt_prog_compiler_wl='`$ECHO "$lt_prog_compiler_wl" | $SED "$delay_single_quote_subst"`'
-lt_prog_compiler_static='`$ECHO "$lt_prog_compiler_static" | $SED "$delay_single_quote_subst"`'
-lt_cv_prog_compiler_c_o='`$ECHO "$lt_cv_prog_compiler_c_o" | $SED "$delay_single_quote_subst"`'
-need_locks='`$ECHO "$need_locks" | $SED "$delay_single_quote_subst"`'
-MANIFEST_TOOL='`$ECHO "$MANIFEST_TOOL" | $SED "$delay_single_quote_subst"`'
-DSYMUTIL='`$ECHO "$DSYMUTIL" | $SED "$delay_single_quote_subst"`'
-NMEDIT='`$ECHO "$NMEDIT" | $SED "$delay_single_quote_subst"`'
-LIPO='`$ECHO "$LIPO" | $SED "$delay_single_quote_subst"`'
-OTOOL='`$ECHO "$OTOOL" | $SED "$delay_single_quote_subst"`'
-OTOOL64='`$ECHO "$OTOOL64" | $SED "$delay_single_quote_subst"`'
-libext='`$ECHO "$libext" | $SED "$delay_single_quote_subst"`'
-shrext_cmds='`$ECHO "$shrext_cmds" | $SED "$delay_single_quote_subst"`'
-extract_expsyms_cmds='`$ECHO "$extract_expsyms_cmds" | $SED "$delay_single_quote_subst"`'
-archive_cmds_need_lc='`$ECHO "$archive_cmds_need_lc" | $SED "$delay_single_quote_subst"`'
-enable_shared_with_static_runtimes='`$ECHO "$enable_shared_with_static_runtimes" | $SED "$delay_single_quote_subst"`'
-export_dynamic_flag_spec='`$ECHO "$export_dynamic_flag_spec" | $SED "$delay_single_quote_subst"`'
-whole_archive_flag_spec='`$ECHO "$whole_archive_flag_spec" | $SED "$delay_single_quote_subst"`'
-compiler_needs_object='`$ECHO "$compiler_needs_object" | $SED "$delay_single_quote_subst"`'
-old_archive_from_new_cmds='`$ECHO "$old_archive_from_new_cmds" | $SED "$delay_single_quote_subst"`'
-old_archive_from_expsyms_cmds='`$ECHO "$old_archive_from_expsyms_cmds" | $SED "$delay_single_quote_subst"`'
-archive_cmds='`$ECHO "$archive_cmds" | $SED "$delay_single_quote_subst"`'
-archive_expsym_cmds='`$ECHO "$archive_expsym_cmds" | $SED "$delay_single_quote_subst"`'
-module_cmds='`$ECHO "$module_cmds" | $SED "$delay_single_quote_subst"`'
-module_expsym_cmds='`$ECHO "$module_expsym_cmds" | $SED "$delay_single_quote_subst"`'
-with_gnu_ld='`$ECHO "$with_gnu_ld" | $SED "$delay_single_quote_subst"`'
-allow_undefined_flag='`$ECHO "$allow_undefined_flag" | $SED "$delay_single_quote_subst"`'
-no_undefined_flag='`$ECHO "$no_undefined_flag" | $SED "$delay_single_quote_subst"`'
-hardcode_libdir_flag_spec='`$ECHO "$hardcode_libdir_flag_spec" | $SED "$delay_single_quote_subst"`'
-hardcode_libdir_separator='`$ECHO "$hardcode_libdir_separator" | $SED "$delay_single_quote_subst"`'
-hardcode_direct='`$ECHO "$hardcode_direct" | $SED "$delay_single_quote_subst"`'
-hardcode_direct_absolute='`$ECHO "$hardcode_direct_absolute" | $SED "$delay_single_quote_subst"`'
-hardcode_minus_L='`$ECHO "$hardcode_minus_L" | $SED "$delay_single_quote_subst"`'
-hardcode_shlibpath_var='`$ECHO "$hardcode_shlibpath_var" | $SED "$delay_single_quote_subst"`'
-hardcode_automatic='`$ECHO "$hardcode_automatic" | $SED "$delay_single_quote_subst"`'
-inherit_rpath='`$ECHO "$inherit_rpath" | $SED "$delay_single_quote_subst"`'
-link_all_deplibs='`$ECHO "$link_all_deplibs" | $SED "$delay_single_quote_subst"`'
-always_export_symbols='`$ECHO "$always_export_symbols" | $SED "$delay_single_quote_subst"`'
-export_symbols_cmds='`$ECHO "$export_symbols_cmds" | $SED "$delay_single_quote_subst"`'
-exclude_expsyms='`$ECHO "$exclude_expsyms" | $SED "$delay_single_quote_subst"`'
-include_expsyms='`$ECHO "$include_expsyms" | $SED "$delay_single_quote_subst"`'
-prelink_cmds='`$ECHO "$prelink_cmds" | $SED "$delay_single_quote_subst"`'
-postlink_cmds='`$ECHO "$postlink_cmds" | $SED "$delay_single_quote_subst"`'
-file_list_spec='`$ECHO "$file_list_spec" | $SED "$delay_single_quote_subst"`'
-variables_saved_for_relink='`$ECHO "$variables_saved_for_relink" | $SED "$delay_single_quote_subst"`'
-need_lib_prefix='`$ECHO "$need_lib_prefix" | $SED "$delay_single_quote_subst"`'
-need_version='`$ECHO "$need_version" | $SED "$delay_single_quote_subst"`'
-version_type='`$ECHO "$version_type" | $SED "$delay_single_quote_subst"`'
-runpath_var='`$ECHO "$runpath_var" | $SED "$delay_single_quote_subst"`'
-shlibpath_var='`$ECHO "$shlibpath_var" | $SED "$delay_single_quote_subst"`'
-shlibpath_overrides_runpath='`$ECHO "$shlibpath_overrides_runpath" | $SED "$delay_single_quote_subst"`'
-libname_spec='`$ECHO "$libname_spec" | $SED "$delay_single_quote_subst"`'
-library_names_spec='`$ECHO "$library_names_spec" | $SED "$delay_single_quote_subst"`'
-soname_spec='`$ECHO "$soname_spec" | $SED "$delay_single_quote_subst"`'
-install_override_mode='`$ECHO "$install_override_mode" | $SED "$delay_single_quote_subst"`'
-postinstall_cmds='`$ECHO "$postinstall_cmds" | $SED "$delay_single_quote_subst"`'
-postuninstall_cmds='`$ECHO "$postuninstall_cmds" | $SED "$delay_single_quote_subst"`'
-finish_cmds='`$ECHO "$finish_cmds" | $SED "$delay_single_quote_subst"`'
-finish_eval='`$ECHO "$finish_eval" | $SED "$delay_single_quote_subst"`'
-hardcode_into_libs='`$ECHO "$hardcode_into_libs" | $SED "$delay_single_quote_subst"`'
-sys_lib_search_path_spec='`$ECHO "$sys_lib_search_path_spec" | $SED "$delay_single_quote_subst"`'
-sys_lib_dlsearch_path_spec='`$ECHO "$sys_lib_dlsearch_path_spec" | $SED "$delay_single_quote_subst"`'
-hardcode_action='`$ECHO "$hardcode_action" | $SED "$delay_single_quote_subst"`'
-enable_dlopen='`$ECHO "$enable_dlopen" | $SED "$delay_single_quote_subst"`'
-enable_dlopen_self='`$ECHO "$enable_dlopen_self" | $SED "$delay_single_quote_subst"`'
-enable_dlopen_self_static='`$ECHO "$enable_dlopen_self_static" | $SED "$delay_single_quote_subst"`'
-old_striplib='`$ECHO "$old_striplib" | $SED "$delay_single_quote_subst"`'
-striplib='`$ECHO "$striplib" | $SED "$delay_single_quote_subst"`'
-compiler_lib_search_dirs='`$ECHO "$compiler_lib_search_dirs" | $SED "$delay_single_quote_subst"`'
-predep_objects='`$ECHO "$predep_objects" | $SED "$delay_single_quote_subst"`'
-postdep_objects='`$ECHO "$postdep_objects" | $SED "$delay_single_quote_subst"`'
-predeps='`$ECHO "$predeps" | $SED "$delay_single_quote_subst"`'
-postdeps='`$ECHO "$postdeps" | $SED "$delay_single_quote_subst"`'
-compiler_lib_search_path='`$ECHO "$compiler_lib_search_path" | $SED "$delay_single_quote_subst"`'
-LD_FC='`$ECHO "$LD_FC" | $SED "$delay_single_quote_subst"`'
-reload_flag_FC='`$ECHO "$reload_flag_FC" | $SED "$delay_single_quote_subst"`'
-reload_cmds_FC='`$ECHO "$reload_cmds_FC" | $SED "$delay_single_quote_subst"`'
-old_archive_cmds_FC='`$ECHO "$old_archive_cmds_FC" | $SED "$delay_single_quote_subst"`'
-compiler_FC='`$ECHO "$compiler_FC" | $SED "$delay_single_quote_subst"`'
-GCC_FC='`$ECHO "$GCC_FC" | $SED "$delay_single_quote_subst"`'
-lt_prog_compiler_no_builtin_flag_FC='`$ECHO "$lt_prog_compiler_no_builtin_flag_FC" | $SED "$delay_single_quote_subst"`'
-lt_prog_compiler_pic_FC='`$ECHO "$lt_prog_compiler_pic_FC" | $SED "$delay_single_quote_subst"`'
-lt_prog_compiler_wl_FC='`$ECHO "$lt_prog_compiler_wl_FC" | $SED "$delay_single_quote_subst"`'
-lt_prog_compiler_static_FC='`$ECHO "$lt_prog_compiler_static_FC" | $SED "$delay_single_quote_subst"`'
-lt_cv_prog_compiler_c_o_FC='`$ECHO "$lt_cv_prog_compiler_c_o_FC" | $SED "$delay_single_quote_subst"`'
-archive_cmds_need_lc_FC='`$ECHO "$archive_cmds_need_lc_FC" | $SED "$delay_single_quote_subst"`'
-enable_shared_with_static_runtimes_FC='`$ECHO "$enable_shared_with_static_runtimes_FC" | $SED "$delay_single_quote_subst"`'
-export_dynamic_flag_spec_FC='`$ECHO "$export_dynamic_flag_spec_FC" | $SED "$delay_single_quote_subst"`'
-whole_archive_flag_spec_FC='`$ECHO "$whole_archive_flag_spec_FC" | $SED "$delay_single_quote_subst"`'
-compiler_needs_object_FC='`$ECHO "$compiler_needs_object_FC" | $SED "$delay_single_quote_subst"`'
-old_archive_from_new_cmds_FC='`$ECHO "$old_archive_from_new_cmds_FC" | $SED "$delay_single_quote_subst"`'
-old_archive_from_expsyms_cmds_FC='`$ECHO "$old_archive_from_expsyms_cmds_FC" | $SED "$delay_single_quote_subst"`'
-archive_cmds_FC='`$ECHO "$archive_cmds_FC" | $SED "$delay_single_quote_subst"`'
-archive_expsym_cmds_FC='`$ECHO "$archive_expsym_cmds_FC" | $SED "$delay_single_quote_subst"`'
-module_cmds_FC='`$ECHO "$module_cmds_FC" | $SED "$delay_single_quote_subst"`'
-module_expsym_cmds_FC='`$ECHO "$module_expsym_cmds_FC" | $SED "$delay_single_quote_subst"`'
-with_gnu_ld_FC='`$ECHO "$with_gnu_ld_FC" | $SED "$delay_single_quote_subst"`'
-allow_undefined_flag_FC='`$ECHO "$allow_undefined_flag_FC" | $SED "$delay_single_quote_subst"`'
-no_undefined_flag_FC='`$ECHO "$no_undefined_flag_FC" | $SED "$delay_single_quote_subst"`'
-hardcode_libdir_flag_spec_FC='`$ECHO "$hardcode_libdir_flag_spec_FC" | $SED "$delay_single_quote_subst"`'
-hardcode_libdir_separator_FC='`$ECHO "$hardcode_libdir_separator_FC" | $SED "$delay_single_quote_subst"`'
-hardcode_direct_FC='`$ECHO "$hardcode_direct_FC" | $SED "$delay_single_quote_subst"`'
-hardcode_direct_absolute_FC='`$ECHO "$hardcode_direct_absolute_FC" | $SED "$delay_single_quote_subst"`'
-hardcode_minus_L_FC='`$ECHO "$hardcode_minus_L_FC" | $SED "$delay_single_quote_subst"`'
-hardcode_shlibpath_var_FC='`$ECHO "$hardcode_shlibpath_var_FC" | $SED "$delay_single_quote_subst"`'
-hardcode_automatic_FC='`$ECHO "$hardcode_automatic_FC" | $SED "$delay_single_quote_subst"`'
-inherit_rpath_FC='`$ECHO "$inherit_rpath_FC" | $SED "$delay_single_quote_subst"`'
-link_all_deplibs_FC='`$ECHO "$link_all_deplibs_FC" | $SED "$delay_single_quote_subst"`'
-always_export_symbols_FC='`$ECHO "$always_export_symbols_FC" | $SED "$delay_single_quote_subst"`'
-export_symbols_cmds_FC='`$ECHO "$export_symbols_cmds_FC" | $SED "$delay_single_quote_subst"`'
-exclude_expsyms_FC='`$ECHO "$exclude_expsyms_FC" | $SED "$delay_single_quote_subst"`'
-include_expsyms_FC='`$ECHO "$include_expsyms_FC" | $SED "$delay_single_quote_subst"`'
-prelink_cmds_FC='`$ECHO "$prelink_cmds_FC" | $SED "$delay_single_quote_subst"`'
-postlink_cmds_FC='`$ECHO "$postlink_cmds_FC" | $SED "$delay_single_quote_subst"`'
-file_list_spec_FC='`$ECHO "$file_list_spec_FC" | $SED "$delay_single_quote_subst"`'
-hardcode_action_FC='`$ECHO "$hardcode_action_FC" | $SED "$delay_single_quote_subst"`'
-compiler_lib_search_dirs_FC='`$ECHO "$compiler_lib_search_dirs_FC" | $SED "$delay_single_quote_subst"`'
-predep_objects_FC='`$ECHO "$predep_objects_FC" | $SED "$delay_single_quote_subst"`'
-postdep_objects_FC='`$ECHO "$postdep_objects_FC" | $SED "$delay_single_quote_subst"`'
-predeps_FC='`$ECHO "$predeps_FC" | $SED "$delay_single_quote_subst"`'
-postdeps_FC='`$ECHO "$postdeps_FC" | $SED "$delay_single_quote_subst"`'
-compiler_lib_search_path_FC='`$ECHO "$compiler_lib_search_path_FC" | $SED "$delay_single_quote_subst"`'
-
-LTCC='$LTCC'
-LTCFLAGS='$LTCFLAGS'
-compiler='$compiler_DEFAULT'
-
-# A function that is used when there is no print builtin or printf.
-func_fallback_echo ()
-{
-  eval 'cat <<_LTECHO_EOF
-\$1
-_LTECHO_EOF'
-}
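
# A minimal usage sketch (hypothetical value): the here-document keeps
# backslashes intact even when no print or printf builtin is available.
func_fallback_echo 'path\with\backslashes'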
-
-# Quote evaled strings.
-for var in SHELL \
-ECHO \
-PATH_SEPARATOR \
-SED \
-GREP \
-EGREP \
-FGREP \
-LD \
-NM \
-LN_S \
-lt_SP2NL \
-lt_NL2SP \
-reload_flag \
-OBJDUMP \
-deplibs_check_method \
-file_magic_cmd \
-file_magic_glob \
-want_nocaseglob \
-DLLTOOL \
-sharedlib_from_linklib_cmd \
-AR \
-AR_FLAGS \
-archiver_list_spec \
-STRIP \
-RANLIB \
-CC \
-CFLAGS \
-compiler \
-lt_cv_sys_global_symbol_pipe \
-lt_cv_sys_global_symbol_to_cdecl \
-lt_cv_sys_global_symbol_to_c_name_address \
-lt_cv_sys_global_symbol_to_c_name_address_lib_prefix \
-nm_file_list_spec \
-lt_prog_compiler_no_builtin_flag \
-lt_prog_compiler_pic \
-lt_prog_compiler_wl \
-lt_prog_compiler_static \
-lt_cv_prog_compiler_c_o \
-need_locks \
-MANIFEST_TOOL \
-DSYMUTIL \
-NMEDIT \
-LIPO \
-OTOOL \
-OTOOL64 \
-shrext_cmds \
-export_dynamic_flag_spec \
-whole_archive_flag_spec \
-compiler_needs_object \
-with_gnu_ld \
-allow_undefined_flag \
-no_undefined_flag \
-hardcode_libdir_flag_spec \
-hardcode_libdir_separator \
-exclude_expsyms \
-include_expsyms \
-file_list_spec \
-variables_saved_for_relink \
-libname_spec \
-library_names_spec \
-soname_spec \
-install_override_mode \
-finish_eval \
-old_striplib \
-striplib \
-compiler_lib_search_dirs \
-predep_objects \
-postdep_objects \
-predeps \
-postdeps \
-compiler_lib_search_path \
-LD_FC \
-reload_flag_FC \
-compiler_FC \
-lt_prog_compiler_no_builtin_flag_FC \
-lt_prog_compiler_pic_FC \
-lt_prog_compiler_wl_FC \
-lt_prog_compiler_static_FC \
-lt_cv_prog_compiler_c_o_FC \
-export_dynamic_flag_spec_FC \
-whole_archive_flag_spec_FC \
-compiler_needs_object_FC \
-with_gnu_ld_FC \
-allow_undefined_flag_FC \
-no_undefined_flag_FC \
-hardcode_libdir_flag_spec_FC \
-hardcode_libdir_separator_FC \
-exclude_expsyms_FC \
-include_expsyms_FC \
-file_list_spec_FC \
-compiler_lib_search_dirs_FC \
-predep_objects_FC \
-postdep_objects_FC \
-predeps_FC \
-postdeps_FC \
-compiler_lib_search_path_FC; do
-    case \`eval \\\\\$ECHO \\\\""\\\\\$\$var"\\\\"\` in
-    *[\\\\\\\`\\"\\\$]*)
-      eval "lt_\$var=\\\\\\"\\\`\\\$ECHO \\"\\\$\$var\\" | \\\$SED \\"\\\$sed_quote_subst\\"\\\`\\\\\\""
-      ;;
-    *)
-      eval "lt_\$var=\\\\\\"\\\$\$var\\\\\\""
-      ;;
-    esac
-done
-
-# Double-quote double-evaled strings.
-for var in reload_cmds \
-old_postinstall_cmds \
-old_postuninstall_cmds \
-old_archive_cmds \
-extract_expsyms_cmds \
-old_archive_from_new_cmds \
-old_archive_from_expsyms_cmds \
-archive_cmds \
-archive_expsym_cmds \
-module_cmds \
-module_expsym_cmds \
-export_symbols_cmds \
-prelink_cmds \
-postlink_cmds \
-postinstall_cmds \
-postuninstall_cmds \
-finish_cmds \
-sys_lib_search_path_spec \
-sys_lib_dlsearch_path_spec \
-reload_cmds_FC \
-old_archive_cmds_FC \
-old_archive_from_new_cmds_FC \
-old_archive_from_expsyms_cmds_FC \
-archive_cmds_FC \
-archive_expsym_cmds_FC \
-module_cmds_FC \
-module_expsym_cmds_FC \
-export_symbols_cmds_FC \
-prelink_cmds_FC \
-postlink_cmds_FC; do
-    case \`eval \\\\\$ECHO \\\\""\\\\\$\$var"\\\\"\` in
-    *[\\\\\\\`\\"\\\$]*)
-      eval "lt_\$var=\\\\\\"\\\`\\\$ECHO \\"\\\$\$var\\" | \\\$SED -e \\"\\\$double_quote_subst\\" -e \\"\\\$sed_quote_subst\\" -e \\"\\\$delay_variable_subst\\"\\\`\\\\\\""
-      ;;
-    *)
-      eval "lt_\$var=\\\\\\"\\\$\$var\\\\\\""
-      ;;
-    esac
-done
-
-ac_aux_dir='$ac_aux_dir'
-xsi_shell='$xsi_shell'
-lt_shell_append='$lt_shell_append'
-
-# See if we are running on zsh, and set the options which allow our
-# commands through without removal of \ escapes.
-if test -n "\${ZSH_VERSION+set}" ; then
-   setopt NO_GLOB_SUBST
-fi
-
-
-    PACKAGE='$PACKAGE'
-    VERSION='$VERSION'
-    TIMESTAMP='$TIMESTAMP'
-    RM='$RM'
-    ofile='$ofile'
-
-
-
-
-
-
-_ACEOF
-
-cat >>$CONFIG_STATUS <<\_ACEOF || ac_write_fail=1
-
-# Handling of arguments.
-for ac_config_target in $ac_config_targets
-do
-  case $ac_config_target in
-    "config.h") CONFIG_HEADERS="$CONFIG_HEADERS config.h" ;;
-    "depfiles") CONFIG_COMMANDS="$CONFIG_COMMANDS depfiles" ;;
-    "libtool") CONFIG_COMMANDS="$CONFIG_COMMANDS libtool" ;;
-    "Makefile") CONFIG_FILES="$CONFIG_FILES Makefile" ;;
-    "lib/Makefile") CONFIG_FILES="$CONFIG_FILES lib/Makefile" ;;
-    "lib/xmlFailures/Makefile") CONFIG_FILES="$CONFIG_FILES lib/xmlFailures/Makefile" ;;
-    "lib/xmlSuccesses/Makefile") CONFIG_FILES="$CONFIG_FILES lib/xmlSuccesses/Makefile" ;;
-    "prog/Makefile") CONFIG_FILES="$CONFIG_FILES prog/Makefile" ;;
-
-  *) as_fn_error $? "invalid argument: \`$ac_config_target'" "$LINENO" 5;;
-  esac
-done
-
-
-# If the user did not use the arguments to specify the items to instantiate,
-# then the envvar interface is used.  Set only those that are not.
-# We use the long form for the default assignment because of an extremely
-# bizarre bug on SunOS 4.1.3.
-if $ac_need_defaults; then
-  test "${CONFIG_FILES+set}" = set || CONFIG_FILES=$config_files
-  test "${CONFIG_HEADERS+set}" = set || CONFIG_HEADERS=$config_headers
-  test "${CONFIG_COMMANDS+set}" = set || CONFIG_COMMANDS=$config_commands
-fi
-
-# Have a temporary directory for convenience.  Make it in the build tree
-# simply because there is no reason against having it here, and in addition,
-# creating and moving files from /tmp can sometimes cause problems.
-# Hook for its removal unless debugging.
-# Note that there is a small window in which the directory will not be cleaned:
-# after its creation but before its name has been assigned to `$tmp'.
-$debug ||
-{
-  tmp= ac_tmp=
-  trap 'exit_status=$?
-  : "${ac_tmp:=$tmp}"
-  { test ! -d "$ac_tmp" || rm -fr "$ac_tmp"; } && exit $exit_status
-' 0
-  trap 'as_fn_exit 1' 1 2 13 15
-}
-# Create a (secure) tmp directory for tmp files.
-
-{
-  tmp=`(umask 077 && mktemp -d "./confXXXXXX") 2>/dev/null` &&
-  test -d "$tmp"
-}  ||
-{
-  tmp=./conf$$-$RANDOM
-  (umask 077 && mkdir "$tmp")
-} || as_fn_error $? "cannot create a temporary directory in ." "$LINENO" 5
-ac_tmp=$tmp
-
-# Set up the scripts for CONFIG_FILES section.
-# No need to generate them if there are no CONFIG_FILES.
-# This happens for instance with `./config.status config.h'.
-if test -n "$CONFIG_FILES"; then
-
-
-ac_cr=`echo X | tr X '\015'`
-# On cygwin, bash can eat \r inside `` if the user requested igncr.
-# But we know of no other shell where ac_cr would be empty at this
-# point, so we can use a bashism as a fallback.
-if test "x$ac_cr" = x; then
-  eval ac_cr=\$\'\\r\'
-fi
-ac_cs_awk_cr=`$AWK 'BEGIN { print "a\rb" }' </dev/null 2>/dev/null`
-if test "$ac_cs_awk_cr" = "a${ac_cr}b"; then
-  ac_cs_awk_cr='\\r'
-else
-  ac_cs_awk_cr=$ac_cr
-fi
-
-echo 'BEGIN {' >"$ac_tmp/subs1.awk" &&
-_ACEOF
-
-
-{
-  echo "cat >conf$$subs.awk <<_ACEOF" &&
-  echo "$ac_subst_vars" | sed 's/.*/&!$&$ac_delim/' &&
-  echo "_ACEOF"
-} >conf$$subs.sh ||
-  as_fn_error $? "could not make $CONFIG_STATUS" "$LINENO" 5
-ac_delim_num=`echo "$ac_subst_vars" | grep -c '^'`
-ac_delim='%!_!# '
-for ac_last_try in false false false false false :; do
-  . ./conf$$subs.sh ||
-    as_fn_error $? "could not make $CONFIG_STATUS" "$LINENO" 5
-
-  ac_delim_n=`sed -n "s/.*$ac_delim\$/X/p" conf$$subs.awk | grep -c X`
-  if test $ac_delim_n = $ac_delim_num; then
-    break
-  elif $ac_last_try; then
-    as_fn_error $? "could not make $CONFIG_STATUS" "$LINENO" 5
-  else
-    ac_delim="$ac_delim!$ac_delim _$ac_delim!! "
-  fi
-done
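
# The same delimiter trick in isolation (a minimal sketch, hypothetical
# payload): lengthen the delimiter until it cannot collide with the data.
payload='text with @substitutions@'
delim='%!_!# '
while printf '%s\n' "$payload" | grep -F "$delim" >/dev/null 2>&1; do
  delim="$delim!$delim _$delim!! "
done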
-rm -f conf$$subs.sh
-
-cat >>$CONFIG_STATUS <<_ACEOF || ac_write_fail=1
-cat >>"\$ac_tmp/subs1.awk" <<\\_ACAWK &&
-_ACEOF
-sed -n '
-h
-s/^/S["/; s/!.*/"]=/
-p
-g
-s/^[^!]*!//
-:repl
-t repl
-s/'"$ac_delim"'$//
-t delim
-:nl
-h
-s/\(.\{148\}\)..*/\1/
-t more1
-s/["\\]/\\&/g; s/^/"/; s/$/\\n"\\/
-p
-n
-b repl
-:more1
-s/["\\]/\\&/g; s/^/"/; s/$/"\\/
-p
-g
-s/.\{148\}//
-t nl
-:delim
-h
-s/\(.\{148\}\)..*/\1/
-t more2
-s/["\\]/\\&/g; s/^/"/; s/$/"/
-p
-b
-:more2
-s/["\\]/\\&/g; s/^/"/; s/$/"\\/
-p
-g
-s/.\{148\}//
-t delim
-' <conf$$subs.awk | sed '
-/^[^""]/{
-  N
-  s/\n//
-}
-' >>$CONFIG_STATUS || ac_write_fail=1
-rm -f conf$$subs.awk
-cat >>$CONFIG_STATUS <<_ACEOF || ac_write_fail=1
-_ACAWK
-cat >>"\$ac_tmp/subs1.awk" <<_ACAWK &&
-  for (key in S) S_is_set[key] = 1
-  FS = ""
-
-}
-{
-  line = $ 0
-  nfields = split(line, field, "@")
-  substed = 0
-  len = length(field[1])
-  for (i = 2; i < nfields; i++) {
-    key = field[i]
-    keylen = length(key)
-    if (S_is_set[key]) {
-      value = S[key]
-      line = substr(line, 1, len) "" value "" substr(line, len + keylen + 3)
-      len += length(value) + length(field[++i])
-      substed = 1
-    } else
-      len += 1 + keylen
-  }
-
-  print line
-}
-
-_ACAWK
-_ACEOF
-cat >>$CONFIG_STATUS <<\_ACEOF || ac_write_fail=1
-if sed "s/$ac_cr//" < /dev/null > /dev/null 2>&1; then
-  sed "s/$ac_cr\$//; s/$ac_cr/$ac_cs_awk_cr/g"
-else
-  cat
-fi < "$ac_tmp/subs1.awk" > "$ac_tmp/subs.awk" \
-  || as_fn_error $? "could not setup config files machinery" "$LINENO" 5
-_ACEOF
-
-# VPATH may cause trouble with some makes, so we remove sole $(srcdir),
-# ${srcdir} and @srcdir@ entries from VPATH if srcdir is ".", strip leading and
-# trailing colons and then remove the whole line if VPATH becomes empty
-# (actually we leave an empty line to preserve line numbers).
-if test "x$srcdir" = x.; then
-  ac_vpsub='/^[	 ]*VPATH[	 ]*=[	 ]*/{
-h
-s///
-s/^/:/
-s/[	 ]*$/:/
-s/:\$(srcdir):/:/g
-s/:\${srcdir}:/:/g
-s/:@srcdir@:/:/g
-s/^:*//
-s/:*$//
-x
-s/\(=[	 ]*\).*/\1/
-G
-s/\n//
-s/^[^=]*=[	 ]*$//
-}'
-fi
-
-cat >>$CONFIG_STATUS <<\_ACEOF || ac_write_fail=1
-fi # test -n "$CONFIG_FILES"
-
-# Set up the scripts for CONFIG_HEADERS section.
-# No need to generate them if there are no CONFIG_HEADERS.
-# This happens for instance with `./config.status Makefile'.
-if test -n "$CONFIG_HEADERS"; then
-cat >"$ac_tmp/defines.awk" <<\_ACAWK ||
-BEGIN {
-_ACEOF
-
-# Transform confdefs.h into an awk script `defines.awk', embedded as
-# here-document in config.status, that substitutes the proper values into
-# config.h.in to produce config.h.
-
-# Create a delimiter string that does not exist in confdefs.h, to ease
-# handling of long lines.
-ac_delim='%!_!# '
-for ac_last_try in false false :; do
-  ac_tt=`sed -n "/$ac_delim/p" confdefs.h`
-  if test -z "$ac_tt"; then
-    break
-  elif $ac_last_try; then
-    as_fn_error $? "could not make $CONFIG_HEADERS" "$LINENO" 5
-  else
-    ac_delim="$ac_delim!$ac_delim _$ac_delim!! "
-  fi
-done
-
-# For the awk script, D is an array of macro values keyed by name,
-# likewise P contains macro parameters if any.  Preserve backslash
-# newline sequences.
-
-ac_word_re=[_$as_cr_Letters][_$as_cr_alnum]*
-sed -n '
-s/.\{148\}/&'"$ac_delim"'/g
-t rset
-:rset
-s/^[	 ]*#[	 ]*define[	 ][	 ]*/ /
-t def
-d
-:def
-s/\\$//
-t bsnl
-s/["\\]/\\&/g
-s/^ \('"$ac_word_re"'\)\(([^()]*)\)[	 ]*\(.*\)/P["\1"]="\2"\
-D["\1"]=" \3"/p
-s/^ \('"$ac_word_re"'\)[	 ]*\(.*\)/D["\1"]=" \2"/p
-d
-:bsnl
-s/["\\]/\\&/g
-s/^ \('"$ac_word_re"'\)\(([^()]*)\)[	 ]*\(.*\)/P["\1"]="\2"\
-D["\1"]=" \3\\\\\\n"\\/p
-t cont
-s/^ \('"$ac_word_re"'\)[	 ]*\(.*\)/D["\1"]=" \2\\\\\\n"\\/p
-t cont
-d
-:cont
-n
-s/.\{148\}/&'"$ac_delim"'/g
-t clear
-:clear
-s/\\$//
-t bsnlc
-s/["\\]/\\&/g; s/^/"/; s/$/"/p
-d
-:bsnlc
-s/["\\]/\\&/g; s/^/"/; s/$/\\\\\\n"\\/p
-b cont
-' <confdefs.h | sed '
-s/'"$ac_delim"'/"\\\
-"/g' >>$CONFIG_STATUS || ac_write_fail=1
-
-cat >>$CONFIG_STATUS <<_ACEOF || ac_write_fail=1
-  for (key in D) D_is_set[key] = 1
-  FS = ""
-}
-/^[\t ]*#[\t ]*(define|undef)[\t ]+$ac_word_re([\t (]|\$)/ {
-  line = \$ 0
-  split(line, arg, " ")
-  if (arg[1] == "#") {
-    defundef = arg[2]
-    mac1 = arg[3]
-  } else {
-    defundef = substr(arg[1], 2)
-    mac1 = arg[2]
-  }
-  split(mac1, mac2, "(") #)
-  macro = mac2[1]
-  prefix = substr(line, 1, index(line, defundef) - 1)
-  if (D_is_set[macro]) {
-    # Preserve the white space surrounding the "#".
-    print prefix "define", macro P[macro] D[macro]
-    next
-  } else {
-    # Replace #undef with comments.  This is necessary, for example,
-    # in the case of _POSIX_SOURCE, which is predefined and required
-    # on some systems where configure will not decide to define it.
-    if (defundef == "undef") {
-      print "/*", prefix defundef, macro, "*/"
-      next
-    }
-  }
-}
-{ print }
-_ACAWK
-_ACEOF
-cat >>$CONFIG_STATUS <<\_ACEOF || ac_write_fail=1
-  as_fn_error $? "could not setup config headers machinery" "$LINENO" 5
-fi # test -n "$CONFIG_HEADERS"
-
-
-eval set X "  :F $CONFIG_FILES  :H $CONFIG_HEADERS    :C $CONFIG_COMMANDS"
-shift
-for ac_tag
-do
-  case $ac_tag in
-  :[FHLC]) ac_mode=$ac_tag; continue;;
-  esac
-  case $ac_mode$ac_tag in
-  :[FHL]*:*);;
-  :L* | :C*:*) as_fn_error $? "invalid tag \`$ac_tag'" "$LINENO" 5;;
-  :[FH]-) ac_tag=-:-;;
-  :[FH]*) ac_tag=$ac_tag:$ac_tag.in;;
-  esac
-  ac_save_IFS=$IFS
-  IFS=:
-  set x $ac_tag
-  IFS=$ac_save_IFS
-  shift
-  ac_file=$1
-  shift
-
-  case $ac_mode in
-  :L) ac_source=$1;;
-  :[FH])
-    ac_file_inputs=
-    for ac_f
-    do
-      case $ac_f in
-      -) ac_f="$ac_tmp/stdin";;
-      *) # Look for the file first in the build tree, then in the source tree
-	 # (if the path is not absolute).  The absolute path cannot be DOS-style,
-	 # because $ac_f cannot contain `:'.
-	 test -f "$ac_f" ||
-	   case $ac_f in
-	   [\\/$]*) false;;
-	   *) test -f "$srcdir/$ac_f" && ac_f="$srcdir/$ac_f";;
-	   esac ||
-	   as_fn_error 1 "cannot find input file: \`$ac_f'" "$LINENO" 5;;
-      esac
-      case $ac_f in *\'*) ac_f=`$as_echo "$ac_f" | sed "s/'/'\\\\\\\\''/g"`;; esac
-      as_fn_append ac_file_inputs " '$ac_f'"
-    done
-
-    # Let's still pretend it is `configure' which instantiates (i.e., don't
-    # use $as_me); people would be surprised to read:
-    #    /* config.h.  Generated by config.status.  */
-    configure_input='Generated from '`
-	  $as_echo "$*" | sed 's|^[^:]*/||;s|:[^:]*/|, |g'
-	`' by configure.'
-    if test x"$ac_file" != x-; then
-      configure_input="$ac_file.  $configure_input"
-      { $as_echo "$as_me:${as_lineno-$LINENO}: creating $ac_file" >&5
-$as_echo "$as_me: creating $ac_file" >&6;}
-    fi
-    # Neutralize special characters interpreted by sed in replacement strings.
-    case $configure_input in #(
-    *\&* | *\|* | *\\* )
-       ac_sed_conf_input=`$as_echo "$configure_input" |
-       sed 's/[\\\\&|]/\\\\&/g'`;; #(
-    *) ac_sed_conf_input=$configure_input;;
-    esac
-
-    case $ac_tag in
-    *:-:* | *:-) cat >"$ac_tmp/stdin" \
-      || as_fn_error $? "could not create $ac_file" "$LINENO" 5 ;;
-    esac
-    ;;
-  esac
-
-  ac_dir=`$as_dirname -- "$ac_file" ||
-$as_expr X"$ac_file" : 'X\(.*[^/]\)//*[^/][^/]*/*$' \| \
-	 X"$ac_file" : 'X\(//\)[^/]' \| \
-	 X"$ac_file" : 'X\(//\)$' \| \
-	 X"$ac_file" : 'X\(/\)' \| . 2>/dev/null ||
-$as_echo X"$ac_file" |
-    sed '/^X\(.*[^/]\)\/\/*[^/][^/]*\/*$/{
-	    s//\1/
-	    q
-	  }
-	  /^X\(\/\/\)[^/].*/{
-	    s//\1/
-	    q
-	  }
-	  /^X\(\/\/\)$/{
-	    s//\1/
-	    q
-	  }
-	  /^X\(\/\).*/{
-	    s//\1/
-	    q
-	  }
-	  s/.*/./; q'`
-  as_dir="$ac_dir"; as_fn_mkdir_p
-  ac_builddir=.
-
-case "$ac_dir" in
-.) ac_dir_suffix= ac_top_builddir_sub=. ac_top_build_prefix= ;;
-*)
-  ac_dir_suffix=/`$as_echo "$ac_dir" | sed 's|^\.[\\/]||'`
-  # A ".." for each directory in $ac_dir_suffix.
-  ac_top_builddir_sub=`$as_echo "$ac_dir_suffix" | sed 's|/[^\\/]*|/..|g;s|/||'`
-  case $ac_top_builddir_sub in
-  "") ac_top_builddir_sub=. ac_top_build_prefix= ;;
-  *)  ac_top_build_prefix=$ac_top_builddir_sub/ ;;
-  esac ;;
-esac
-ac_abs_top_builddir=$ac_pwd
-ac_abs_builddir=$ac_pwd$ac_dir_suffix
-# for backward compatibility:
-ac_top_builddir=$ac_top_build_prefix
-
-case $srcdir in
-  .)  # We are building in place.
-    ac_srcdir=.
-    ac_top_srcdir=$ac_top_builddir_sub
-    ac_abs_top_srcdir=$ac_pwd ;;
-  [\\/]* | ?:[\\/]* )  # Absolute name.
-    ac_srcdir=$srcdir$ac_dir_suffix;
-    ac_top_srcdir=$srcdir
-    ac_abs_top_srcdir=$srcdir ;;
-  *) # Relative name.
-    ac_srcdir=$ac_top_build_prefix$srcdir$ac_dir_suffix
-    ac_top_srcdir=$ac_top_build_prefix$srcdir
-    ac_abs_top_srcdir=$ac_pwd/$srcdir ;;
-esac
-ac_abs_srcdir=$ac_abs_top_srcdir$ac_dir_suffix
-
-
-  case $ac_mode in
-  :F)
-  #
-  # CONFIG_FILE
-  #
-
-  case $INSTALL in
-  [\\/$]* | ?:[\\/]* ) ac_INSTALL=$INSTALL ;;
-  *) ac_INSTALL=$ac_top_build_prefix$INSTALL ;;
-  esac
-  ac_MKDIR_P=$MKDIR_P
-  case $MKDIR_P in
-  [\\/$]* | ?:[\\/]* ) ;;
-  */*) ac_MKDIR_P=$ac_top_build_prefix$MKDIR_P ;;
-  esac
-_ACEOF
-
-cat >>$CONFIG_STATUS <<\_ACEOF || ac_write_fail=1
-# If the template does not know about datarootdir, expand it.
-# FIXME: This hack should be removed a few years after 2.60.
-ac_datarootdir_hack=; ac_datarootdir_seen=
-ac_sed_dataroot='
-/datarootdir/ {
-  p
-  q
-}
-/@datadir@/p
-/@docdir@/p
-/@infodir@/p
-/@localedir@/p
-/@mandir@/p'
-case `eval "sed -n \"\$ac_sed_dataroot\" $ac_file_inputs"` in
-*datarootdir*) ac_datarootdir_seen=yes;;
-*@datadir@*|*@docdir@*|*@infodir@*|*@localedir@*|*@mandir@*)
-  { $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: $ac_file_inputs seems to ignore the --datarootdir setting" >&5
-$as_echo "$as_me: WARNING: $ac_file_inputs seems to ignore the --datarootdir setting" >&2;}
-_ACEOF
-cat >>$CONFIG_STATUS <<_ACEOF || ac_write_fail=1
-  ac_datarootdir_hack='
-  s&@datadir@&$datadir&g
-  s&@docdir@&$docdir&g
-  s&@infodir@&$infodir&g
-  s&@localedir@&$localedir&g
-  s&@mandir@&$mandir&g
-  s&\\\${datarootdir}&$datarootdir&g' ;;
-esac
-_ACEOF
-
-# Neutralize VPATH when `$srcdir' = `.'.
-# Shell code in configure.ac might set extrasub.
-# FIXME: do we really want to maintain this feature?
-cat >>$CONFIG_STATUS <<_ACEOF || ac_write_fail=1
-ac_sed_extra="$ac_vpsub
-$extrasub
-_ACEOF
-cat >>$CONFIG_STATUS <<\_ACEOF || ac_write_fail=1
-:t
-/@[a-zA-Z_][a-zA-Z_0-9]*@/!b
-s|@configure_input@|$ac_sed_conf_input|;t t
-s&@top_builddir@&$ac_top_builddir_sub&;t t
-s&@top_build_prefix@&$ac_top_build_prefix&;t t
-s&@srcdir@&$ac_srcdir&;t t
-s&@abs_srcdir@&$ac_abs_srcdir&;t t
-s&@top_srcdir@&$ac_top_srcdir&;t t
-s&@abs_top_srcdir@&$ac_abs_top_srcdir&;t t
-s&@builddir@&$ac_builddir&;t t
-s&@abs_builddir@&$ac_abs_builddir&;t t
-s&@abs_top_builddir@&$ac_abs_top_builddir&;t t
-s&@INSTALL@&$ac_INSTALL&;t t
-s&@MKDIR_P@&$ac_MKDIR_P&;t t
-$ac_datarootdir_hack
-"
-eval sed \"\$ac_sed_extra\" "$ac_file_inputs" | $AWK -f "$ac_tmp/subs.awk" \
-  >$ac_tmp/out || as_fn_error $? "could not create $ac_file" "$LINENO" 5
-
-test -z "$ac_datarootdir_hack$ac_datarootdir_seen" &&
-  { ac_out=`sed -n '/\${datarootdir}/p' "$ac_tmp/out"`; test -n "$ac_out"; } &&
-  { ac_out=`sed -n '/^[	 ]*datarootdir[	 ]*:*=/p' \
-      "$ac_tmp/out"`; test -z "$ac_out"; } &&
-  { $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: $ac_file contains a reference to the variable \`datarootdir'
-which seems to be undefined.  Please make sure it is defined" >&5
-$as_echo "$as_me: WARNING: $ac_file contains a reference to the variable \`datarootdir'
-which seems to be undefined.  Please make sure it is defined" >&2;}
-
-  rm -f "$ac_tmp/stdin"
-  case $ac_file in
-  -) cat "$ac_tmp/out" && rm -f "$ac_tmp/out";;
-  *) rm -f "$ac_file" && mv "$ac_tmp/out" "$ac_file";;
-  esac \
-  || as_fn_error $? "could not create $ac_file" "$LINENO" 5
- ;;
-  :H)
-  #
-  # CONFIG_HEADER
-  #
-  if test x"$ac_file" != x-; then
-    {
-      $as_echo "/* $configure_input  */" \
-      && eval '$AWK -f "$ac_tmp/defines.awk"' "$ac_file_inputs"
-    } >"$ac_tmp/config.h" \
-      || as_fn_error $? "could not create $ac_file" "$LINENO" 5
-    if diff "$ac_file" "$ac_tmp/config.h" >/dev/null 2>&1; then
-      { $as_echo "$as_me:${as_lineno-$LINENO}: $ac_file is unchanged" >&5
-$as_echo "$as_me: $ac_file is unchanged" >&6;}
-    else
-      rm -f "$ac_file"
-      mv "$ac_tmp/config.h" "$ac_file" \
-	|| as_fn_error $? "could not create $ac_file" "$LINENO" 5
-    fi
-  else
-    $as_echo "/* $configure_input  */" \
-      && eval '$AWK -f "$ac_tmp/defines.awk"' "$ac_file_inputs" \
-      || as_fn_error $? "could not create -" "$LINENO" 5
-  fi
-# Compute "$ac_file"'s index in $config_headers.
-_am_arg="$ac_file"
-_am_stamp_count=1
-for _am_header in $config_headers :; do
-  case $_am_header in
-    $_am_arg | $_am_arg:* )
-      break ;;
-    * )
-      _am_stamp_count=`expr $_am_stamp_count + 1` ;;
-  esac
-done
-echo "timestamp for $_am_arg" >`$as_dirname -- "$_am_arg" ||
-$as_expr X"$_am_arg" : 'X\(.*[^/]\)//*[^/][^/]*/*$' \| \
-	 X"$_am_arg" : 'X\(//\)[^/]' \| \
-	 X"$_am_arg" : 'X\(//\)$' \| \
-	 X"$_am_arg" : 'X\(/\)' \| . 2>/dev/null ||
-$as_echo X"$_am_arg" |
-    sed '/^X\(.*[^/]\)\/\/*[^/][^/]*\/*$/{
-	    s//\1/
-	    q
-	  }
-	  /^X\(\/\/\)[^/].*/{
-	    s//\1/
-	    q
-	  }
-	  /^X\(\/\/\)$/{
-	    s//\1/
-	    q
-	  }
-	  /^X\(\/\).*/{
-	    s//\1/
-	    q
-	  }
-	  s/.*/./; q'`/stamp-h$_am_stamp_count
- ;;
-
-  :C)  { $as_echo "$as_me:${as_lineno-$LINENO}: executing $ac_file commands" >&5
-$as_echo "$as_me: executing $ac_file commands" >&6;}
- ;;
-  esac
-
-
-  case $ac_file$ac_mode in
-    "depfiles":C) test x"$AMDEP_TRUE" != x"" || {
-  # Autoconf 2.62 quotes --file arguments for eval, but not when files
-  # are listed without --file.  Let's play safe and only enable the eval
-  # if we detect the quoting.
-  case $CONFIG_FILES in
-  *\'*) eval set x "$CONFIG_FILES" ;;
-  *)   set x $CONFIG_FILES ;;
-  esac
-  shift
-  for mf
-  do
-    # Strip MF so we end up with the name of the file.
-    mf=`echo "$mf" | sed -e 's/:.*$//'`
-    # Check whether this is an Automake generated Makefile or not.
-    # We used to match only the files named `Makefile.in', but
-    # some people rename them; so instead we look at the file content.
-    # Grep'ing the first line is not enough: some people post-process
-    # each Makefile.in and add a new line on top of each file to say so.
-    # Grep'ing the whole file is not good either: AIX grep has a line
-    # limit of 2048, but every sed we know of understands at least 4000.
-    if sed -n 's,^#.*generated by automake.*,X,p' "$mf" | grep X >/dev/null 2>&1; then
-      dirpart=`$as_dirname -- "$mf" ||
-$as_expr X"$mf" : 'X\(.*[^/]\)//*[^/][^/]*/*$' \| \
-	 X"$mf" : 'X\(//\)[^/]' \| \
-	 X"$mf" : 'X\(//\)$' \| \
-	 X"$mf" : 'X\(/\)' \| . 2>/dev/null ||
-$as_echo X"$mf" |
-    sed '/^X\(.*[^/]\)\/\/*[^/][^/]*\/*$/{
-	    s//\1/
-	    q
-	  }
-	  /^X\(\/\/\)[^/].*/{
-	    s//\1/
-	    q
-	  }
-	  /^X\(\/\/\)$/{
-	    s//\1/
-	    q
-	  }
-	  /^X\(\/\).*/{
-	    s//\1/
-	    q
-	  }
-	  s/.*/./; q'`
-    else
-      continue
-    fi
-    # Extract the definition of DEPDIR, am__include, and am__quote
-    # from the Makefile without running `make'.
-    DEPDIR=`sed -n 's/^DEPDIR = //p' < "$mf"`
-    test -z "$DEPDIR" && continue
-    am__include=`sed -n 's/^am__include = //p' < "$mf"`
-    test -z "$am__include" && continue
-    am__quote=`sed -n 's/^am__quote = //p' < "$mf"`
-    # When using ansi2knr, U may be empty or an underscore; expand it
-    U=`sed -n 's/^U = //p' < "$mf"`
-    # Find all dependency output files, they are included files with
-    # $(DEPDIR) in their names.  We invoke sed twice because it is the
-    # simplest approach to changing $(DEPDIR) to its actual value in the
-    # expansion.
-    for file in `sed -n "
-      s/^$am__include $am__quote\(.*(DEPDIR).*\)$am__quote"'$/\1/p' <"$mf" | \
-	 sed -e 's/\$(DEPDIR)/'"$DEPDIR"'/g' -e 's/\$U/'"$U"'/g'`; do
-      # Make sure the directory exists.
-      test -f "$dirpart/$file" && continue
-      fdir=`$as_dirname -- "$file" ||
-$as_expr X"$file" : 'X\(.*[^/]\)//*[^/][^/]*/*$' \| \
-	 X"$file" : 'X\(//\)[^/]' \| \
-	 X"$file" : 'X\(//\)$' \| \
-	 X"$file" : 'X\(/\)' \| . 2>/dev/null ||
-$as_echo X"$file" |
-    sed '/^X\(.*[^/]\)\/\/*[^/][^/]*\/*$/{
-	    s//\1/
-	    q
-	  }
-	  /^X\(\/\/\)[^/].*/{
-	    s//\1/
-	    q
-	  }
-	  /^X\(\/\/\)$/{
-	    s//\1/
-	    q
-	  }
-	  /^X\(\/\).*/{
-	    s//\1/
-	    q
-	  }
-	  s/.*/./; q'`
-      as_dir=$dirpart/$fdir; as_fn_mkdir_p
-      # echo "creating $dirpart/$file"
-      echo '# dummy' > "$dirpart/$file"
-    done
-  done
-}
- ;;
-    "libtool":C)
-
-    # See if we are running on zsh, and set the options which allow our
-    # commands through without removal of \ escapes.
-    if test -n "${ZSH_VERSION+set}" ; then
-      setopt NO_GLOB_SUBST
-    fi
-
-    cfgfile="${ofile}T"
-    trap "$RM \"$cfgfile\"; exit 1" 1 2 15
-    $RM "$cfgfile"
-
-    cat <<_LT_EOF >> "$cfgfile"
-#! $SHELL
-
-# `$ECHO "$ofile" | sed 's%^.*/%%'` - Provide generalized library-building support services.
-# Generated automatically by $as_me ($PACKAGE$TIMESTAMP) $VERSION
-# Libtool was configured on host `(hostname || uname -n) 2>/dev/null | sed 1q`:
-# NOTE: Changes made to this file will be lost: look at ltmain.sh.
-#
-#   Copyright (C) 1996, 1997, 1998, 1999, 2000, 2001, 2003, 2004, 2005,
-#                 2006, 2007, 2008, 2009, 2010, 2011 Free Software
-#                 Foundation, Inc.
-#   Written by Gordon Matzigkeit, 1996
-#
-#   This file is part of GNU Libtool.
-#
-# GNU Libtool is free software; you can redistribute it and/or
-# modify it under the terms of the GNU General Public License as
-# published by the Free Software Foundation; either version 2 of
-# the License, or (at your option) any later version.
-#
-# As a special exception to the GNU General Public License,
-# if you distribute this file as part of a program or library that
-# is built using GNU Libtool, you may include this file under the
-# same distribution terms that you use for the rest of that program.
-#
-# GNU Libtool is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
-# GNU General Public License for more details.
-#
-# You should have received a copy of the GNU General Public License
-# along with GNU Libtool; see the file COPYING.  If not, a copy
-# can be downloaded from http://www.gnu.org/licenses/gpl.html, or
-# obtained by writing to the Free Software Foundation, Inc.,
-# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
-
-
-# The names of the tagged configurations supported by this script.
-available_tags="FC "
-
-# ### BEGIN LIBTOOL CONFIG
-
-# Which release of libtool.m4 was used?
-macro_version=$macro_version
-macro_revision=$macro_revision
-
-# Whether or not to build shared libraries.
-build_libtool_libs=$enable_shared
-
-# Whether or not to build static libraries.
-build_old_libs=$enable_static
-
-# What type of objects to build.
-pic_mode=$pic_mode
-
-# Whether or not to optimize for fast installation.
-fast_install=$enable_fast_install
-
-# Shell to use when invoking shell scripts.
-SHELL=$lt_SHELL
-
-# An echo program that protects backslashes.
-ECHO=$lt_ECHO
-
-# The PATH separator for the build system.
-PATH_SEPARATOR=$lt_PATH_SEPARATOR
-
-# The host system.
-host_alias=$host_alias
-host=$host
-host_os=$host_os
-
-# The build system.
-build_alias=$build_alias
-build=$build
-build_os=$build_os
-
-# A sed program that does not truncate output.
-SED=$lt_SED
-
-# Sed that helps us avoid accidentally triggering echo(1) options like -n.
-Xsed="\$SED -e 1s/^X//"
-
-# A grep program that handles long lines.
-GREP=$lt_GREP
-
-# An ERE matcher.
-EGREP=$lt_EGREP
-
-# A literal string matcher.
-FGREP=$lt_FGREP
-
-# A BSD- or MS-compatible name lister.
-NM=$lt_NM
-
-# Whether we need soft or hard links.
-LN_S=$lt_LN_S
-
-# What is the maximum length of a command?
-max_cmd_len=$max_cmd_len
-
-# Object file suffix (normally "o").
-objext=$ac_objext
-
-# Executable file suffix (normally "").
-exeext=$exeext
-
-# Whether the shell understands "unset".
-lt_unset=$lt_unset
-
-# Turn spaces into newlines.
-SP2NL=$lt_lt_SP2NL
-
-# Turn newlines into spaces.
-NL2SP=$lt_lt_NL2SP
-
-# Convert \$build file names to \$host format.
-to_host_file_cmd=$lt_cv_to_host_file_cmd
-
-# Convert \$build files to toolchain format.
-to_tool_file_cmd=$lt_cv_to_tool_file_cmd
-
-# An object symbol dumper.
-OBJDUMP=$lt_OBJDUMP
-
-# Method to check whether dependent libraries are shared objects.
-deplibs_check_method=$lt_deplibs_check_method
-
-# Command to use when deplibs_check_method = "file_magic".
-file_magic_cmd=$lt_file_magic_cmd
-
-# How to find potential files when deplibs_check_method = "file_magic".
-file_magic_glob=$lt_file_magic_glob
-
-# Find potential files using nocaseglob when deplibs_check_method = "file_magic".
-want_nocaseglob=$lt_want_nocaseglob
-
-# DLL creation program.
-DLLTOOL=$lt_DLLTOOL
-
-# Command to associate shared and link libraries.
-sharedlib_from_linklib_cmd=$lt_sharedlib_from_linklib_cmd
-
-# The archiver.
-AR=$lt_AR
-
-# Flags to create an archive.
-AR_FLAGS=$lt_AR_FLAGS
-
-# How to feed a file listing to the archiver.
-archiver_list_spec=$lt_archiver_list_spec
-
-# A symbol stripping program.
-STRIP=$lt_STRIP
-
-# Commands used to install an old-style archive.
-RANLIB=$lt_RANLIB
-old_postinstall_cmds=$lt_old_postinstall_cmds
-old_postuninstall_cmds=$lt_old_postuninstall_cmds
-
-# Whether to use a lock for old archive extraction.
-lock_old_archive_extraction=$lock_old_archive_extraction
-
-# A C compiler.
-LTCC=$lt_CC
-
-# LTCC compiler flags.
-LTCFLAGS=$lt_CFLAGS
-
-# Take the output of nm and produce a listing of raw symbols and C names.
-global_symbol_pipe=$lt_lt_cv_sys_global_symbol_pipe
-
-# Transform the output of nm in a proper C declaration.
-global_symbol_to_cdecl=$lt_lt_cv_sys_global_symbol_to_cdecl
-
-# Transform the output of nm in a C name address pair.
-global_symbol_to_c_name_address=$lt_lt_cv_sys_global_symbol_to_c_name_address
-
-# Transform the output of nm in a C name address pair when lib prefix is needed.
-global_symbol_to_c_name_address_lib_prefix=$lt_lt_cv_sys_global_symbol_to_c_name_address_lib_prefix
-
-# Specify filename containing input files for \$NM.
-nm_file_list_spec=$lt_nm_file_list_spec
-
-# The root in which to search for dependent libraries, and in which our libraries should be installed.
-lt_sysroot=$lt_sysroot
-
-# The name of the directory that contains temporary libtool files.
-objdir=$objdir
-
-# Used to examine libraries when file_magic_cmd begins with "file".
-MAGIC_CMD=$MAGIC_CMD
-
-# Must we lock files when doing compilation?
-need_locks=$lt_need_locks
-
-# Manifest tool.
-MANIFEST_TOOL=$lt_MANIFEST_TOOL
-
-# Tool to manipulate archived DWARF debug symbol files on Mac OS X.
-DSYMUTIL=$lt_DSYMUTIL
-
-# Tool to change global to local symbols on Mac OS X.
-NMEDIT=$lt_NMEDIT
-
-# Tool to manipulate fat objects and archives on Mac OS X.
-LIPO=$lt_LIPO
-
-# ldd/readelf-like tool for Mach-O binaries on Mac OS X.
-OTOOL=$lt_OTOOL
-
-# ldd/readelf-like tool for 64-bit Mach-O binaries on Mac OS X 10.4.
-OTOOL64=$lt_OTOOL64
-
-# Old archive suffix (normally "a").
-libext=$libext
-
-# Shared library suffix (normally ".so").
-shrext_cmds=$lt_shrext_cmds
-
-# The commands to extract the exported symbol list from a shared archive.
-extract_expsyms_cmds=$lt_extract_expsyms_cmds
-
-# Variables whose values should be saved in libtool wrapper scripts and
-# restored at link time.
-variables_saved_for_relink=$lt_variables_saved_for_relink
-
-# Do we need the "lib" prefix for modules?
-need_lib_prefix=$need_lib_prefix
-
-# Do we need a version for libraries?
-need_version=$need_version
-
-# Library versioning type.
-version_type=$version_type
-
-# Shared library runtime path variable.
-runpath_var=$runpath_var
-
-# Shared library path variable.
-shlibpath_var=$shlibpath_var
-
-# Is shlibpath searched before the hard-coded library search path?
-shlibpath_overrides_runpath=$shlibpath_overrides_runpath
-
-# Format of library name prefix.
-libname_spec=$lt_libname_spec
-
-# List of archive names.  First name is the real one, the rest are links.
-# The last name is the one that the linker finds with -lNAME
-library_names_spec=$lt_library_names_spec
-
-# The coded name of the library, if different from the real name.
-soname_spec=$lt_soname_spec
-
-# Permission mode override for installation of shared libraries.
-install_override_mode=$lt_install_override_mode
-
-# Command to use after installation of a shared archive.
-postinstall_cmds=$lt_postinstall_cmds
-
-# Command to use after uninstallation of a shared archive.
-postuninstall_cmds=$lt_postuninstall_cmds
-
-# Commands used to finish a libtool library installation in a directory.
-finish_cmds=$lt_finish_cmds
-
-# As "finish_cmds", except a single script fragment to be evaled but
-# not shown.
-finish_eval=$lt_finish_eval
-
-# Whether we should hardcode library paths into libraries.
-hardcode_into_libs=$hardcode_into_libs
-
-# Compile-time system search path for libraries.
-sys_lib_search_path_spec=$lt_sys_lib_search_path_spec
-
-# Run-time system search path for libraries.
-sys_lib_dlsearch_path_spec=$lt_sys_lib_dlsearch_path_spec
-
-# Whether dlopen is supported.
-dlopen_support=$enable_dlopen
-
-# Whether dlopen of programs is supported.
-dlopen_self=$enable_dlopen_self
-
-# Whether dlopen of statically linked programs is supported.
-dlopen_self_static=$enable_dlopen_self_static
-
-# Commands to strip libraries.
-old_striplib=$lt_old_striplib
-striplib=$lt_striplib
-
-
-# The linker used to build libraries.
-LD=$lt_LD
-
-# How to create reloadable object files.
-reload_flag=$lt_reload_flag
-reload_cmds=$lt_reload_cmds
-
-# Commands used to build an old-style archive.
-old_archive_cmds=$lt_old_archive_cmds
-
-# A language specific compiler.
-CC=$lt_compiler
-
-# Is the compiler the GNU compiler?
-with_gcc=$GCC
-
-# Compiler flag to turn off builtin functions.
-no_builtin_flag=$lt_lt_prog_compiler_no_builtin_flag
-
-# Additional compiler flags for building library objects.
-pic_flag=$lt_lt_prog_compiler_pic
-
-# How to pass a linker flag through the compiler.
-wl=$lt_lt_prog_compiler_wl
-
-# Compiler flag to prevent dynamic linking.
-link_static_flag=$lt_lt_prog_compiler_static
-
-# Does compiler simultaneously support -c and -o options?
-compiler_c_o=$lt_lt_cv_prog_compiler_c_o
-
-# Whether or not to add -lc for building shared libraries.
-build_libtool_need_lc=$archive_cmds_need_lc
-
-# Whether or not to disallow shared libs when runtime libs are static.
-allow_libtool_libs_with_static_runtimes=$enable_shared_with_static_runtimes
-
-# Compiler flag to allow reflexive dlopens.
-export_dynamic_flag_spec=$lt_export_dynamic_flag_spec
-
-# Compiler flag to generate shared objects directly from archives.
-whole_archive_flag_spec=$lt_whole_archive_flag_spec
-
-# Whether the compiler copes with passing no objects directly.
-compiler_needs_object=$lt_compiler_needs_object
-
-# Create an old-style archive from a shared archive.
-old_archive_from_new_cmds=$lt_old_archive_from_new_cmds
-
-# Create a temporary old-style archive to link instead of a shared archive.
-old_archive_from_expsyms_cmds=$lt_old_archive_from_expsyms_cmds
-
-# Commands used to build a shared archive.
-archive_cmds=$lt_archive_cmds
-archive_expsym_cmds=$lt_archive_expsym_cmds
-
-# Commands used to build a loadable module if different from building
-# a shared archive.
-module_cmds=$lt_module_cmds
-module_expsym_cmds=$lt_module_expsym_cmds
-
-# Whether we are building with GNU ld or not.
-with_gnu_ld=$lt_with_gnu_ld
-
-# Flag that allows shared libraries with undefined symbols to be built.
-allow_undefined_flag=$lt_allow_undefined_flag
-
-# Flag that enforces no undefined symbols.
-no_undefined_flag=$lt_no_undefined_flag
-
-# Flag to hardcode \$libdir into a binary during linking.
-# This must work even if \$libdir does not exist.
-hardcode_libdir_flag_spec=$lt_hardcode_libdir_flag_spec
-
-# Whether we need a single "-rpath" flag with a separated argument.
-hardcode_libdir_separator=$lt_hardcode_libdir_separator
-
-# Set to "yes" if using DIR/libNAME\${shared_ext} during linking hardcodes
-# DIR into the resulting binary.
-hardcode_direct=$hardcode_direct
-
-# Set to "yes" if using DIR/libNAME\${shared_ext} during linking hardcodes
-# DIR into the resulting binary and the resulting library dependency is
-# "absolute", i.e. impossible to change by setting \${shlibpath_var} if the
-# library is relocated.
-hardcode_direct_absolute=$hardcode_direct_absolute
-
-# Set to "yes" if using the -LDIR flag during linking hardcodes DIR
-# into the resulting binary.
-hardcode_minus_L=$hardcode_minus_L
-
-# Set to "yes" if using SHLIBPATH_VAR=DIR during linking hardcodes DIR
-# into the resulting binary.
-hardcode_shlibpath_var=$hardcode_shlibpath_var
-
-# Set to "yes" if building a shared library automatically hardcodes DIR
-# into the library and all subsequent libraries and executables linked
-# against it.
-hardcode_automatic=$hardcode_automatic
-
-# Set to yes if linker adds runtime paths of dependent libraries
-# to runtime path list.
-inherit_rpath=$inherit_rpath
-
-# Whether libtool must link a program against all its dependency libraries.
-link_all_deplibs=$link_all_deplibs
-
-# Set to "yes" if exported symbols are required.
-always_export_symbols=$always_export_symbols
-
-# The commands to list exported symbols.
-export_symbols_cmds=$lt_export_symbols_cmds
-
-# Symbols that should not be listed in the preloaded symbols.
-exclude_expsyms=$lt_exclude_expsyms
-
-# Symbols that must always be exported.
-include_expsyms=$lt_include_expsyms
-
-# Commands necessary for linking programs (against libraries) with templates.
-prelink_cmds=$lt_prelink_cmds
-
-# Commands necessary for finishing linking programs.
-postlink_cmds=$lt_postlink_cmds
-
-# Specify filename containing input files.
-file_list_spec=$lt_file_list_spec
-
-# How to hardcode a shared library path into an executable.
-hardcode_action=$hardcode_action
-
-# The directories searched by this compiler when creating a shared library.
-compiler_lib_search_dirs=$lt_compiler_lib_search_dirs
-
-# Dependencies to place before and after the objects being linked to
-# create a shared library.
-predep_objects=$lt_predep_objects
-postdep_objects=$lt_postdep_objects
-predeps=$lt_predeps
-postdeps=$lt_postdeps
-
-# The library search path used internally by the compiler when linking
-# a shared library.
-compiler_lib_search_path=$lt_compiler_lib_search_path
-
-# ### END LIBTOOL CONFIG
-
-_LT_EOF
-
-  case $host_os in
-  aix3*)
-    cat <<\_LT_EOF >> "$cfgfile"
-# AIX sometimes has problems with the GCC collect2 program.  For some
-# reason, if we set the COLLECT_NAMES environment variable, the problems
-# vanish in a puff of smoke.
-if test "X${COLLECT_NAMES+set}" != Xset; then
-  COLLECT_NAMES=
-  export COLLECT_NAMES
-fi
-_LT_EOF
-    ;;
-  esac
-
-
-ltmain="$ac_aux_dir/ltmain.sh"
-
-
-  # We use sed instead of cat because bash on DJGPP gets confused if
-  # it finds mixed CR/LF and LF-only lines.  Since sed operates in
-  # text mode, it properly converts lines to CR/LF.  This bash problem
-  # is reportedly fixed, but why not run on old versions too?
-  sed '$q' "$ltmain" >> "$cfgfile" \
-     || (rm -f "$cfgfile"; exit 1)
-
-  if test x"$xsi_shell" = xyes; then
-  sed -e '/^func_dirname ()$/,/^} # func_dirname /c\
-func_dirname ()\
-{\
-\    case ${1} in\
-\      */*) func_dirname_result="${1%/*}${2}" ;;\
-\      *  ) func_dirname_result="${3}" ;;\
-\    esac\
-} # Extended-shell func_dirname implementation' "$cfgfile" > $cfgfile.tmp \
-  && mv -f "$cfgfile.tmp" "$cfgfile" \
-    || (rm -f "$cfgfile" && cp "$cfgfile.tmp" "$cfgfile" && rm -f "$cfgfile.tmp")
-test 0 -eq $? || _lt_function_replace_fail=:
-
-
-  sed -e '/^func_basename ()$/,/^} # func_basename /c\
-func_basename ()\
-{\
-\    func_basename_result="${1##*/}"\
-} # Extended-shell func_basename implementation' "$cfgfile" > $cfgfile.tmp \
-  && mv -f "$cfgfile.tmp" "$cfgfile" \
-    || (rm -f "$cfgfile" && cp "$cfgfile.tmp" "$cfgfile" && rm -f "$cfgfile.tmp")
-test 0 -eq $? || _lt_function_replace_fail=:
-
-
-  sed -e '/^func_dirname_and_basename ()$/,/^} # func_dirname_and_basename /c\
-func_dirname_and_basename ()\
-{\
-\    case ${1} in\
-\      */*) func_dirname_result="${1%/*}${2}" ;;\
-\      *  ) func_dirname_result="${3}" ;;\
-\    esac\
-\    func_basename_result="${1##*/}"\
-} # Extended-shell func_dirname_and_basename implementation' "$cfgfile" > $cfgfile.tmp \
-  && mv -f "$cfgfile.tmp" "$cfgfile" \
-    || (rm -f "$cfgfile" && cp "$cfgfile.tmp" "$cfgfile" && rm -f "$cfgfile.tmp")
-test 0 -eq $? || _lt_function_replace_fail=:
-
-
-  sed -e '/^func_stripname ()$/,/^} # func_stripname /c\
-func_stripname ()\
-{\
-\    # pdksh 5.2.14 does not do ${X%$Y} correctly if both X and Y are\
-\    # positional parameters, so assign one to an ordinary parameter first.\
-\    func_stripname_result=${3}\
-\    func_stripname_result=${func_stripname_result#"${1}"}\
-\    func_stripname_result=${func_stripname_result%"${2}"}\
-} # Extended-shell func_stripname implementation' "$cfgfile" > $cfgfile.tmp \
-  && mv -f "$cfgfile.tmp" "$cfgfile" \
-    || (rm -f "$cfgfile" && cp "$cfgfile.tmp" "$cfgfile" && rm -f "$cfgfile.tmp")
-test 0 -eq $? || _lt_function_replace_fail=:
-
-
-  sed -e '/^func_split_long_opt ()$/,/^} # func_split_long_opt /c\
-func_split_long_opt ()\
-{\
-\    func_split_long_opt_name=${1%%=*}\
-\    func_split_long_opt_arg=${1#*=}\
-} # Extended-shell func_split_long_opt implementation' "$cfgfile" > $cfgfile.tmp \
-  && mv -f "$cfgfile.tmp" "$cfgfile" \
-    || (rm -f "$cfgfile" && cp "$cfgfile.tmp" "$cfgfile" && rm -f "$cfgfile.tmp")
-test 0 -eq $? || _lt_function_replace_fail=:
-
-
-  sed -e '/^func_split_short_opt ()$/,/^} # func_split_short_opt /c\
-func_split_short_opt ()\
-{\
-\    func_split_short_opt_arg=${1#??}\
-\    func_split_short_opt_name=${1%"$func_split_short_opt_arg"}\
-} # Extended-shell func_split_short_opt implementation' "$cfgfile" > $cfgfile.tmp \
-  && mv -f "$cfgfile.tmp" "$cfgfile" \
-    || (rm -f "$cfgfile" && cp "$cfgfile.tmp" "$cfgfile" && rm -f "$cfgfile.tmp")
-test 0 -eq $? || _lt_function_replace_fail=:
-
-
-  sed -e '/^func_lo2o ()$/,/^} # func_lo2o /c\
-func_lo2o ()\
-{\
-\    case ${1} in\
-\      *.lo) func_lo2o_result=${1%.lo}.${objext} ;;\
-\      *)    func_lo2o_result=${1} ;;\
-\    esac\
-} # Extended-shell func_lo2o implementation' "$cfgfile" > $cfgfile.tmp \
-  && mv -f "$cfgfile.tmp" "$cfgfile" \
-    || (rm -f "$cfgfile" && cp "$cfgfile.tmp" "$cfgfile" && rm -f "$cfgfile.tmp")
-test 0 -eq $? || _lt_function_replace_fail=:
-
-
-  sed -e '/^func_xform ()$/,/^} # func_xform /c\
-func_xform ()\
-{\
-    func_xform_result=${1%.*}.lo\
-} # Extended-shell func_xform implementation' "$cfgfile" > $cfgfile.tmp \
-  && mv -f "$cfgfile.tmp" "$cfgfile" \
-    || (rm -f "$cfgfile" && cp "$cfgfile.tmp" "$cfgfile" && rm -f "$cfgfile.tmp")
-test 0 -eq $? || _lt_function_replace_fail=:
-
-
-  sed -e '/^func_arith ()$/,/^} # func_arith /c\
-func_arith ()\
-{\
-    func_arith_result=$(( $* ))\
-} # Extended-shell func_arith implementation' "$cfgfile" > $cfgfile.tmp \
-  && mv -f "$cfgfile.tmp" "$cfgfile" \
-    || (rm -f "$cfgfile" && cp "$cfgfile.tmp" "$cfgfile" && rm -f "$cfgfile.tmp")
-test 0 -eq $? || _lt_function_replace_fail=:
-
-
-  sed -e '/^func_len ()$/,/^} # func_len /c\
-func_len ()\
-{\
-    func_len_result=${#1}\
-} # Extended-shell func_len implementation' "$cfgfile" > $cfgfile.tmp \
-  && mv -f "$cfgfile.tmp" "$cfgfile" \
-    || (rm -f "$cfgfile" && cp "$cfgfile.tmp" "$cfgfile" && rm -f "$cfgfile.tmp")
-test 0 -eq $? || _lt_function_replace_fail=:
-
-fi
-
-if test x"$lt_shell_append" = xyes; then
-  sed -e '/^func_append ()$/,/^} # func_append /c\
-func_append ()\
-{\
-    eval "${1}+=\\${2}"\
-} # Extended-shell func_append implementation' "$cfgfile" > $cfgfile.tmp \
-  && mv -f "$cfgfile.tmp" "$cfgfile" \
-    || (rm -f "$cfgfile" && cp "$cfgfile.tmp" "$cfgfile" && rm -f "$cfgfile.tmp")
-test 0 -eq $? || _lt_function_replace_fail=:
-
-
-  sed -e '/^func_append_quoted ()$/,/^} # func_append_quoted /c\
-func_append_quoted ()\
-{\
-\    func_quote_for_eval "${2}"\
-\    eval "${1}+=\\\\ \\$func_quote_for_eval_result"\
-} # Extended-shell func_append_quoted implementation' "$cfgfile" > $cfgfile.tmp \
-  && mv -f "$cfgfile.tmp" "$cfgfile" \
-    || (rm -f "$cfgfile" && cp "$cfgfile.tmp" "$cfgfile" && rm -f "$cfgfile.tmp")
-test 0 -eq $? || _lt_function_replace_fail=:
-
-
-  # Save a `func_append' function call where possible by direct use of '+='
-  sed -e 's%func_append \([a-zA-Z_]\{1,\}\) "%\1+="%g' $cfgfile > $cfgfile.tmp \
-    && mv -f "$cfgfile.tmp" "$cfgfile" \
-      || (rm -f "$cfgfile" && cp "$cfgfile.tmp" "$cfgfile" && rm -f "$cfgfile.tmp")
-  test 0 -eq $? || _lt_function_replace_fail=:
-else
-  # Save a `func_append' function call even when '+=' is not available
-  sed -e 's%func_append \([a-zA-Z_]\{1,\}\) "%\1="$\1%g' $cfgfile > $cfgfile.tmp \
-    && mv -f "$cfgfile.tmp" "$cfgfile" \
-      || (rm -f "$cfgfile" && cp "$cfgfile.tmp" "$cfgfile" && rm -f "$cfgfile.tmp")
-  test 0 -eq $? || _lt_function_replace_fail=:
-fi
-
-if test x"$_lt_function_replace_fail" = x":"; then
-  { $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: Unable to substitute extended shell functions in $ofile" >&5
-$as_echo "$as_me: WARNING: Unable to substitute extended shell functions in $ofile" >&2;}
-fi
-
-
-   mv -f "$cfgfile" "$ofile" ||
-    (rm -f "$ofile" && cp "$cfgfile" "$ofile" && rm -f "$cfgfile")
-  chmod +x "$ofile"
-
-
-    cat <<_LT_EOF >> "$ofile"
-
-# ### BEGIN LIBTOOL TAG CONFIG: FC
-
-# The linker used to build libraries.
-LD=$lt_LD_FC
-
-# How to create reloadable object files.
-reload_flag=$lt_reload_flag_FC
-reload_cmds=$lt_reload_cmds_FC
-
-# Commands used to build an old-style archive.
-old_archive_cmds=$lt_old_archive_cmds_FC
-
-# A language specific compiler.
-CC=$lt_compiler_FC
-
-# Is the compiler the GNU compiler?
-with_gcc=$GCC_FC
-
-# Compiler flag to turn off builtin functions.
-no_builtin_flag=$lt_lt_prog_compiler_no_builtin_flag_FC
-
-# Additional compiler flags for building library objects.
-pic_flag=$lt_lt_prog_compiler_pic_FC
-
-# How to pass a linker flag through the compiler.
-wl=$lt_lt_prog_compiler_wl_FC
-
-# Compiler flag to prevent dynamic linking.
-link_static_flag=$lt_lt_prog_compiler_static_FC
-
-# Does compiler simultaneously support -c and -o options?
-compiler_c_o=$lt_lt_cv_prog_compiler_c_o_FC
-
-# Whether or not to add -lc for building shared libraries.
-build_libtool_need_lc=$archive_cmds_need_lc_FC
-
-# Whether or not to disallow shared libs when runtime libs are static.
-allow_libtool_libs_with_static_runtimes=$enable_shared_with_static_runtimes_FC
-
-# Compiler flag to allow reflexive dlopens.
-export_dynamic_flag_spec=$lt_export_dynamic_flag_spec_FC
-
-# Compiler flag to generate shared objects directly from archives.
-whole_archive_flag_spec=$lt_whole_archive_flag_spec_FC
-
-# Whether the compiler copes with passing no objects directly.
-compiler_needs_object=$lt_compiler_needs_object_FC
-
-# Create an old-style archive from a shared archive.
-old_archive_from_new_cmds=$lt_old_archive_from_new_cmds_FC
-
-# Create a temporary old-style archive to link instead of a shared archive.
-old_archive_from_expsyms_cmds=$lt_old_archive_from_expsyms_cmds_FC
-
-# Commands used to build a shared archive.
-archive_cmds=$lt_archive_cmds_FC
-archive_expsym_cmds=$lt_archive_expsym_cmds_FC
-
-# Commands used to build a loadable module if different from building
-# a shared archive.
-module_cmds=$lt_module_cmds_FC
-module_expsym_cmds=$lt_module_expsym_cmds_FC
-
-# Whether we are building with GNU ld or not.
-with_gnu_ld=$lt_with_gnu_ld_FC
-
-# Flag that allows shared libraries with undefined symbols to be built.
-allow_undefined_flag=$lt_allow_undefined_flag_FC
-
-# Flag that enforces no undefined symbols.
-no_undefined_flag=$lt_no_undefined_flag_FC
-
-# Flag to hardcode \$libdir into a binary during linking.
-# This must work even if \$libdir does not exist.
-hardcode_libdir_flag_spec=$lt_hardcode_libdir_flag_spec_FC
-
-# Whether we need a single "-rpath" flag with a separated argument.
-hardcode_libdir_separator=$lt_hardcode_libdir_separator_FC
-
-# Set to "yes" if using DIR/libNAME\${shared_ext} during linking hardcodes
-# DIR into the resulting binary.
-hardcode_direct=$hardcode_direct_FC
-
-# Set to "yes" if using DIR/libNAME\${shared_ext} during linking hardcodes
-# DIR into the resulting binary and the resulting library dependency is
-# "absolute", i.e. impossible to change by setting \${shlibpath_var} if the
-# library is relocated.
-hardcode_direct_absolute=$hardcode_direct_absolute_FC
-
-# Set to "yes" if using the -LDIR flag during linking hardcodes DIR
-# into the resulting binary.
-hardcode_minus_L=$hardcode_minus_L_FC
-
-# Set to "yes" if using SHLIBPATH_VAR=DIR during linking hardcodes DIR
-# into the resulting binary.
-hardcode_shlibpath_var=$hardcode_shlibpath_var_FC
-
-# Set to "yes" if building a shared library automatically hardcodes DIR
-# into the library and all subsequent libraries and executables linked
-# against it.
-hardcode_automatic=$hardcode_automatic_FC
-
-# Set to yes if linker adds runtime paths of dependent libraries
-# to runtime path list.
-inherit_rpath=$inherit_rpath_FC
-
-# Whether libtool must link a program against all its dependency libraries.
-link_all_deplibs=$link_all_deplibs_FC
-
-# Set to "yes" if exported symbols are required.
-always_export_symbols=$always_export_symbols_FC
-
-# The commands to list exported symbols.
-export_symbols_cmds=$lt_export_symbols_cmds_FC
-
-# Symbols that should not be listed in the preloaded symbols.
-exclude_expsyms=$lt_exclude_expsyms_FC
-
-# Symbols that must always be exported.
-include_expsyms=$lt_include_expsyms_FC
-
-# Commands necessary for linking programs (against libraries) with templates.
-prelink_cmds=$lt_prelink_cmds_FC
-
-# Commands necessary for finishing linking programs.
-postlink_cmds=$lt_postlink_cmds_FC
-
-# Specify filename containing input files.
-file_list_spec=$lt_file_list_spec_FC
-
-# How to hardcode a shared library path into an executable.
-hardcode_action=$hardcode_action_FC
-
-# The directories searched by this compiler when creating a shared library.
-compiler_lib_search_dirs=$lt_compiler_lib_search_dirs_FC
-
-# Dependencies to place before and after the objects being linked to
-# create a shared library.
-predep_objects=$lt_predep_objects_FC
-postdep_objects=$lt_postdep_objects_FC
-predeps=$lt_predeps_FC
-postdeps=$lt_postdeps_FC
-
-# The library search path used internally by the compiler when linking
-# a shared library.
-compiler_lib_search_path=$lt_compiler_lib_search_path_FC
-
-# ### END LIBTOOL TAG CONFIG: FC
-_LT_EOF
-
- ;;
-
-  esac
-done # for ac_tag
-
-
-as_fn_exit 0
-_ACEOF
-ac_clean_files=$ac_clean_files_save
-
-test $ac_write_fail = 0 ||
-  as_fn_error $? "write failure creating $CONFIG_STATUS" "$LINENO" 5
-
-
-# configure is writing to config.log, and then calls config.status.
-# config.status does its own redirection, appending to config.log.
-# Unfortunately, on DOS this fails, as config.log is still kept open
-# by configure, so config.status won't be able to write to it; its
-# output is simply discarded.  So we exec the FD to /dev/null,
-# effectively closing config.log, so it can be properly (re)opened and
-# appended to by config.status.  When coming back to configure, we
-# need to make the FD available again.
-if test "$no_create" != yes; then
-  ac_cs_success=:
-  ac_config_status_args=
-  test "$silent" = yes &&
-    ac_config_status_args="$ac_config_status_args --quiet"
-  exec 5>/dev/null
-  $SHELL $CONFIG_STATUS $ac_config_status_args || ac_cs_success=false
-  exec 5>>config.log
-  # Use ||, not &&, to avoid exiting from the if with $? = 1, which
-  # would make configure fail if this is the last instruction.
-  $ac_cs_success || as_fn_exit 1
-fi
-if test -n "$ac_unrecognized_opts" && test "$enable_option_checking" != no; then
-  { $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: unrecognized options: $ac_unrecognized_opts" >&5
-$as_echo "$as_me: WARNING: unrecognized options: $ac_unrecognized_opts" >&2;}
-fi
-
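The hunk above deletes the config-header machinery that config.status carries: confdefs.h is compiled into an awk program (defines.awk) whose D and P arrays map macro names to values and parameters, and that program rewrites each "#undef MACRO" template line in config.h.in. A minimal sketch of the transformation it performs, with a hypothetical macro name and the usual file names (the real script also handles "# undef" spacing, parameterized macros, and backslash-newline continuation):

    # Sketch only: turn "#undef FOO" into "#define FOO <value>" for every
    # macro recorded in D, and comment out the macros that stay undefined.
    awk 'BEGIN { D["HAVE_STDIO_H"] = " 1" }
    $1 == "#undef" || $1 == "#define" {
      macro = $2
      if (macro in D) { print "#define " macro D[macro]; next }
      if ($1 == "#undef") { print "/* #undef " macro " */"; next }
    }
    { print }' config.h.in > config.h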
diff --git a/CMake/cdat_modules_extra/uvcdat.in b/CMake/cdat_modules_extra/uvcdat.in
deleted file mode 100755
index 8b1a3f238d520d9ae43c718e7fbab8b86439e8e2..0000000000000000000000000000000000000000
--- a/CMake/cdat_modules_extra/uvcdat.in
+++ /dev/null
@@ -1,64 +0,0 @@
-#!/bin/bash
-# source is not portable whereas . is
-. "@CMAKE_INSTALL_PREFIX@/bin/setup_runtime.sh"
-
-# Used in event of -o "log_location"; grabs the next arg and puts it in target
-capture=false
-# The location we'll be logging to
-target="$HOME/.uvcdat/uvcdatsession.log"
-# Whether or not we're redirecting the stdout/stderr
-redirect=true
-
-for var in "$@"
-do
-    if [ $capture = true ]; then
-        # -o was found, grabbing the next value
-        target=$var
-        if [ "$target" = "" ]; then
-            # This is the way we can redirect output to stdout
-            # Do not redirect output
-            redirect=false
-        fi
-        # Don't need to capture anything else
-        capture=false
-        continue
-    fi
-
-    case $var in
-        # Trigger above block on the next arg
-    -o) capture=true;
-            ;;
-                # Parse the target out of the = section
-    --output=*) target=`sed "s/--output=\(.*\)/\1/" <<< "$var"`
-        if [ "$target" = "" ]; then
-            # Do not redirect output
-            redirect=false
-        fi
-            ;;
-                  # Do not redirect output
-    --output-std) redirect=false
-            ;;
-            # Shouldn't redirect for help
-    --help) redirect=false
-            ;;
-    *)      ;;
-    esac
-done
-
-if [ "$redirect" = false ]; then
-    python@PYVER@ "@CMAKE_INSTALL_PREFIX@/vistrails/vistrails/uvcdat.py" "$@"
-else
-    # Replace all uses of ~ with $HOME
-    target="${target/#\~/$HOME}"
-
-    # Check if path exists
-    target_dir="$(dirname "$target")"
-    if [ ! -d "$target_dir" ]; then
-        mkdir -p "$target_dir"
-    fi
-
-    # Make sure the file exists and that we have write privileges
-    touch "$target"
-    # Launch with redirection
-    python@PYVER@ "@CMAKE_INSTALL_PREFIX@/vistrails/vistrails/uvcdat.py" "$@" >>$target 2>&1
-fi
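The launcher deleted above decides whether the session's stdout/stderr goes to a log file. Assuming it is installed under the name uvcdat, its option handling amounts to the following hypothetical invocations:

    uvcdat                            # append output to ~/.uvcdat/uvcdatsession.log (default)
    uvcdat -o /tmp/session.log        # append output to the given file
    uvcdat --output=/tmp/session.log  # same, in --output= form
    uvcdat -o ""                      # empty target: no redirection
    uvcdat --output-std               # no redirection; keep stdout/stderr
    uvcdat --help                     # help output is never redirected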
diff --git a/CMake/cdat_modules_extra/uvcdat.mac.in b/CMake/cdat_modules_extra/uvcdat.mac.in
deleted file mode 100755
index 14a394f3733ecf9263e30aa7cdce6a848ea44dea..0000000000000000000000000000000000000000
--- a/CMake/cdat_modules_extra/uvcdat.mac.in
+++ /dev/null
@@ -1,6 +0,0 @@
-#!/bin/bash
-# source is not portable whereas . is
-BASEDIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )"
-. "$BASEDIR/setup_runtime.sh"
-python@PYVER@ "$BASEDIR/../vistrails/vistrails/uvcdat.py"
-
diff --git a/CMake/cdat_modules_extra/uvcmetrics_test_data_md5s.txt b/CMake/cdat_modules_extra/uvcmetrics_test_data_md5s.txt
deleted file mode 100644
index e61b4896d069172a943068b5a0b16229080d4cb4..0000000000000000000000000000000000000000
--- a/CMake/cdat_modules_extra/uvcmetrics_test_data_md5s.txt
+++ /dev/null
@@ -1,232 +0,0 @@
-82848263d3f9032b41bc02f758cb0bed  acme_hires_land/c_t_t341f02.F1850p.clm2.h0.1850-01.nc
-09c2f48312305fef59ee571fe1c3a84a  acme_hires_land/c_t_t341f02.F1850p.clm2.h0.1850-02.nc
-3274cb2d7cccffac20059f564a97998e  acme_hires_land/c_t_t341f02.F1850p.clm2.h0.1850-03.nc
-5e677beb0eccfe8c94ec9e18460c2581  acme_hires_land/c_t_t341f02.F1850p.clm2.h0.1850-04.nc
-cd565477d7d8555566e16bf5ff4bfe44  acme_hires_land/c_t_t341f02.F1850p.clm2.h0.1850-05.nc
-d6038ef39f33b6a6d06a3554531a1ed2  acme_hires_land/c_t_t341f02.F1850p.clm2.h0.1850-06.nc
-97bf73768c9f50068ffa7399fc0a1e0a  acme_hires_land/c_t_t341f02.F1850p.clm2.h0.1850-07.nc
-705147cb320524d8257dcee8b450aec3  acme_hires_land/c_t_t341f02.F1850p.clm2.h0.1850-08.nc
-164861198d2cb1897713afbeebf9eb62  acme_hires_land/c_t_t341f02.F1850p.clm2.h0.1850-09.nc
-0b342120b940679cab8a2204e6b9f0d0  acme_hires_land/c_t_t341f02.F1850p.clm2.h0.1850-10.nc
-6a12f3a02fc2607afe871f1d4aff7ea2  acme_hires_land/c_t_t341f02.F1850p.clm2.h0.1850-11.nc
-0d642c1b3697ff3c45d07b7a90a07fab  acme_hires_land/c_t_t341f02.F1850p.clm2.h0.1850-12.nc
-8b3e27df842aba7dc88b4c13266cc4ed  acme_hires_land/c_t_t341f02.F1850p.clm2.h0.1851-01.nc
-38a7850265356a9b49ab78172b121927  acme_hires_land/c_t_t341f02.F1850p.clm2.h0.1851-02.nc
-30ab14ec20e9ee54ff9ba3bd0332c490  acme_hires_land/c_t_t341f02.F1850p.clm2.h0.1851-03.nc
-2d4c0cf37429c5a1d97be1acc5b907b1  acme_hires_land/c_t_t341f02.F1850p.clm2.h0.1851-04.nc
-72ed71d9937b77e9c01f35ec3924e478  acme_hires_land/c_t_t341f02.F1850p.clm2.h0.1851-05.nc
-62c85090e8b93a0caedebae52a6feddf  acme_hires_land/c_t_t341f02.F1850p.clm2.h0.1851-06.nc
-7c9a70dfc28d7a9eb052f281738adb55  acme_hires_land/c_t_t341f02.F1850p.clm2.h0.1851-07.nc
-d505af09b431fcfb2255fbabcae16ce0  acme_hires_land/c_t_t341f02.F1850p.clm2.h0.1851-08.nc
-2875586ec0f21efd94a4fca640ef7f59  acme_hires_land/c_t_t341f02.F1850p.clm2.h0.1851-09.nc
-6bc3b40018820413633a07c4d8278e50  acme_hires_land/c_t_t341f02.F1850p.clm2.h0.1851-10.nc
-6a56554e98908dbcb1ad04f8129b7e8d  acme_hires_land/c_t_t341f02.F1850p.clm2.h0.1851-11.nc
-610ad7ff458a87c863fc2d792e69dc2f  acme_hires_land/c_t_t341f02.F1850p.clm2.h0.1851-12.nc
-e17b3f827c0162c246df0a3aabe4ce9d  acme_hires_land/c_t_t341f02.F1850p.clm2.h0.1852-01.nc
-79a551fdfb44b88c64fb6552048f4dc5  acme_hires_land/c_t_t341f02.F1850p.clm2.h0.1852-02.nc
-e8c38da3ad16c7866b3b3b540647a5da  acme_hires_land/c_t_t341f02.F1850p.clm2.h0.1852-03.nc
-59de1e4fedabf0976590af6e470ceec1  acme_hires_land/c_t_t341f02.F1850p.clm2.h0.1852-04.nc
-147389dbf5bfb479d09a8982d6690e8b  acme_hires_land/c_t_t341f02.F1850p.clm2.h0.1852-05.nc
-c6f3843a3f716de98693c11bc807c206  acme_hires_land/c_t_t341f02.F1850p.clm2.h0.1852-06.nc
-be02c6100e317dd037ad0cccf9d8a8cf  acme_hires_land/c_t_t341f02.F1850p.clm2.h0.1852-07.nc
-109b769371207a503ac9039b37fd4dad  acme_hires_land/c_t_t341f02.F1850p.clm2.h0.1852-08.nc
-629f86af7dbe6f3b379450f951e3e1b2  acme_hires_land/c_t_t341f02.F1850p.clm2.h0.1852-09.nc
-02c3a536f6025ebde38bee00bc69aa09  acme_hires_land/c_t_t341f02.F1850p.clm2.h0.1852-10.nc
-a661f1ce9b87e46865b489fde9752edf  acme_hires_land/c_t_t341f02.F1850p.clm2.h0.1852-11.nc
-7de08765c4e2f9a34e21ba8024453adc  acme_hires_land/c_t_t341f02.F1850p.clm2.h0.1852-12.nc
-28441278df2af93f9ebfa1b51ef21007  acme_hires_land/c_t_t341f02.F1850p.clm2.h0.1853-01.nc
-1576faec4df27627c3eb975e7c6f5fef  acme_hires_land/c_t_t341f02.F1850p.clm2.h0.1853-02.nc
-abccaf09316d0f1705557dd752d359af  acme_hires_land/c_t_t341f02.F1850p.clm2.h0.1853-03.nc
-cde766ef10310253fc3baaa4d5ca8761  acme_hires_land/c_t_t341f02.F1850p.clm2.h0.1853-04.nc
-fd58a1f7d6d2a6037df183e0fca9ff5f  acme_hires_land/c_t_t341f02.F1850p.clm2.h0.1853-05.nc
-73a0b57991b798ca2b52e56afcf4f630  acme_hires_land/c_t_t341f02.F1850p.clm2.h0.1853-06.nc
-f45485c533798bb53b4452469a5bc678  acme_hires_land/c_t_t341f02.F1850p.clm2.h0.1853-07.nc
-149bfade64fe7b0b984059954e88ce97  acme_hires_land/c_t_t341f02.F1850p.clm2.h0.1853-08.nc
-ada05ce9162160c9a6c02d9d335c9349  acme_hires_land/c_t_t341f02.F1850p.clm2.h0.1853-09.nc
-aca027b6b88bfa17059ff22945cd393f  acme_hires_land/c_t_t341f02.F1850p.clm2.h0.1853-10.nc
-54e738cdb234fcec78d86a49790fafdc  acme_hires_land/c_t_t341f02.F1850p.clm2.h0.1853-11.nc
-151f3e6f7c5a8cbfd31abada8df36dd2  acme_hires_land/c_t_t341f02.F1850p.clm2.h0.1853-12.nc
-91b73bdb596231c604d4c76db55bce5e  acme_hires_land/c_t_t341f02.F1850p.clm2.h0.1854-01.nc
-5446fed21e3700d9d90f212ddbdbedc4  acme_hires_land/c_t_t341f02.F1850p.clm2.h0.1854-02.nc
-8f69e20b5993613eb473a904cb3c5cfd  acme_hires_land/c_t_t341f02.F1850p.clm2.h0.1854-03.nc
-6d984999d23f93c2434960f43381556d  acme_hires_land/c_t_t341f02.F1850p.clm2.h0.1854-04.nc
-8be183c391e859bc36a8215f276bdd1b  acme_hires_land/c_t_t341f02.F1850p.clm2.h0.1854-05.nc
-6e610ae6499ec706940ce81b3ee5df85  acme_hires_land/c_t_t341f02.F1850p.clm2.h0.1854-06.nc
-2be1078885df583b0a1ee929ef663846  acme_hires_land/c_t_t341f02.F1850p.clm2.h0.1854-07.nc
-493969c7aef835400219722322276ec5  acme_hires_land/c_t_t341f02.F1850p.clm2.h0.1854-08.nc
-055d76ef47600f3b0e0142d6cb4db758  acme_hires_land/c_t_t341f02.F1850p.clm2.h0.1854-09.nc
-12ec6242e2e3269b180c4a2367963327  acme_hires_land/c_t_t341f02.F1850p.clm2.h0.1854-10.nc
-a857e9ae0696c33c38171c7d92791181  acme_hires_land/c_t_t341f02.F1850p.clm2.h0.1854-11.nc
-42097c573ac657ec44bde9aabfa98afd  acme_hires_land/c_t_t341f02.F1850p.clm2.h0.1854-12.nc
-b7198ad93b6eae51fcfd49fb3f9877a9  acme_hires_land/c_t_t341f02.F1850p.clm2.h0.1855-01.nc
-09d6b9c23bf272f7ad8e6eba37e45edb  acme_hires_land/c_t_t341f02.F1850p.clm2.h0.1855-02.nc
-b3ab42c5083df9f901dde9c7fe90bf26  acme_hires_land/c_t_t341f02.F1850p.clm2.h0.1855-03.nc
-4a63c5b704fa1e8fefab025c4e8c46aa  acme_hires_land/c_t_t341f02.F1850p.clm2.h0.1855-04.nc
-4608c9358aa5754352eb9b87d85e7a1c  acme_hires_land/c_t_t341f02.F1850p.clm2.h0.1855-05.nc
-4eff1ec373b9beb820e5e1e4113498aa  acme_hires_land/c_t_t341f02.F1850p.clm2.h0.1855-06.nc
-cffdc3aab308d233c956720d80671b95  acme_hires_land/c_t_t341f02.F1850p.clm2.h0.1855-07.nc
-8dfcd2ecac7d37c12ac0adef4825c67f  acme_hires_land/c_t_t341f02.F1850p.clm2.h0.1855-08.nc
-0a196de01ca67ce291a026e755b9921d  acme_hires_land/c_t_t341f02.F1850p.clm2.h0.1855-09.nc
-e6931415ab36579fff13f4933a6bf1f5  acme_hires_land/c_t_t341f02.F1850p.clm2.h0.1855-10.nc
-526fbd9987a6d5faf927106bf048aa2b  acme_hires_land/c_t_t341f02.F1850p.clm2.h0.1855-11.nc
-839301c709e5a7b3eb271e75782979af  acme_hires_land/c_t_t341f02.F1850p.clm2.h0.1855-12.nc
-6d86e5edd0a92931226ac99d13167980  acme_lores_atm_climo/F1850.g37_bench_ANN_climo.nc
-f3c5c5a4d91d6e3e0cc4d2df362b0503  acme_lores_atm_climo/F1850.g37_bench_DJF_climo.nc
-06e0cc5f50cd7f2c1e2f30c4c4278b47  acme_lores_cam_climo/F1850.g37_bench_ANN_climo.nc
-712d887975121e81168ab03a535cadba  acme_lores_cam_climo/F1850.g37_bench_APR_climo.nc
-e2e5505205c326eea69574226d881359  acme_lores_cam_climo/F1850.g37_bench_DJF_climo.nc
-d36b0d4f7fb27c3897668131bdec05aa  acme_lores_cam_climo/F1850.g37_bench_JAN_climo.nc
-39342297493a616eb8988ef0a3a9c988  acme_lores_cam_climo/F1850.g37_bench_JJA_climo.nc
-0a8cbf9b41f2cc752800a584f6356cbd  acme_lores_cam_climo/F1850.g37_bench_JUL_climo.nc
-7b2da1926acf2c0f9ffad80497775bb6  acme_lores_cam_climo/F1850.g37_bench_MAM_climo.nc
-7df286b070640d0074c556560edc6a73  acme_lores_cam_climo/F1850.g37_bench_SON_climo.nc
-77d7b6de33467bdebe1a05700f03cae7  acme_lores_clm_climo/ANN_climo.nc
-a075f9d88b0b29b9f6a706f56bc628fa  acme_lores_clm_climo/APR_climo.nc
-f0694a365f88bef9f2ae34169afcd99b  acme_lores_clm_climo/AUG_climo.nc
-3928a8108bed42d5035bb9e9ef06a227  acme_lores_clm_climo/DEC_climo.nc
-5cd00312d791f34b1d33ca336d874473  acme_lores_clm_climo/DJF_climo.nc
-5e2849739943108c549c6724c6927ccd  acme_lores_clm_climo/FEB_climo.nc
-4badd8e20c7e45d8156f0677416d4f85  acme_lores_clm_climo/JAN_climo.nc
-bcc44d7f27938f1f21cf3c34d29dfe0d  acme_lores_clm_climo/JJA_climo.nc
-c11b441acebdf5e7dac696485abd31b8  acme_lores_clm_climo/JUL_climo.nc
-1740586484d8e59b18bf97d89658cd97  acme_lores_clm_climo/JUN_climo.nc
-6aca924e7541a42f37c189934912d4bb  acme_lores_clm_climo/MAM_climo.nc
-16c8c8d84c30d2f72b1bafd7929841a5  acme_lores_clm_climo/MAR_climo.nc
-eb483652fc0b0b069761659262d1d111  acme_lores_clm_climo/MAY_climo.nc
-e3e52b82e64357c50fe42aed7e0ba56c  acme_lores_clm_climo/NOV_climo.nc
-8969b2045cd430d03cebaccb91995f3d  acme_lores_clm_climo/OCT_climo.nc
-4a1d44b3ab16645aef032006be8b4af3  acme_lores_clm_climo/SEP_climo.nc
-f57a1c82229d2985894ef643e0392135  acme_lores_clm_climo/SON_climo.nc
-2a40dbd588429cbefb6317fc48076bb9  acme_lores_land/c_t_F1850.g37_bench.clm2.h0.0001-01.nc
-176fbe665aa0ea9ee3ba63d2df780537  acme_lores_land/c_t_F1850.g37_bench.clm2.h0.0001-02.nc
-cc857575c3b7e81520be03a20fd5fc4c  acme_lores_land/c_t_F1850.g37_bench.clm2.h0.0001-03.nc
-1a01b328a240435c32ea7f4dcc880db6  acme_lores_land/c_t_F1850.g37_bench.clm2.h0.0001-04.nc
-14b1ed3abf5c37c7d3611b57111123a8  acme_lores_land/c_t_F1850.g37_bench.clm2.h0.0001-05.nc
-a2cf201b629578dc40a1a6c8c2ebfdd4  acme_lores_land/c_t_F1850.g37_bench.clm2.h0.0001-06.nc
-3ba6118cecded5739d20ef78d2e75458  acme_lores_land/c_t_F1850.g37_bench.clm2.h0.0001-07.nc
-a42132db7da5c17b9a69aee42951ae3d  acme_lores_land/c_t_F1850.g37_bench.clm2.h0.0001-08.nc
-ee65c00602bc7e0de884e09be4b2bb1d  acme_lores_land/c_t_F1850.g37_bench.clm2.h0.0001-09.nc
-1909f013d84b298eeff19b5250f61daa  acme_lores_land/c_t_F1850.g37_bench.clm2.h0.0001-10.nc
-4b96d62be06f31b8be94388ce59dbeb7  acme_lores_land/c_t_F1850.g37_bench.clm2.h0.0001-11.nc
-486218898744c21420a24ab36121520d  acme_lores_land/c_t_F1850.g37_bench.clm2.h0.0001-12.nc
-950360fe5f5334d3026ba44850c539a6  cam35_data/cam3_5_01_climo.nc
-fc869f4e9c79960f2f0766905379c4c3  cam35_data/cam3_5_02_climo.nc
-c11b0b3283b726318d84edc8ad042714  cam35_data/cam3_5_03_climo.nc
-4d1bfc12f358026addd34d47eca1b52c  cam35_data/cam3_5_04_climo.nc
-da9d0149d3e81d7bdae96076e07daf39  cam35_data/cam3_5_05_climo.nc
-a5526dbaac0a0da52ca04bc5b9c71c56  cam35_data/cam3_5_06_climo.nc
-00662c2eafcb297cf8aabf8c51456d0b  cam35_data/cam3_5_07_climo.nc
-ba72017189a80edd5181d639ae6204e9  cam35_data/cam3_5_08_climo.nc
-b23c87bbf00d39b0966e3a6d072c0abc  cam35_data/cam3_5_09_climo.nc
-4f5d4e732e97c163f63ed1430858c5e3  cam35_data/cam3_5_10_climo.nc
-6abc0b942e43cf5fbadbead8ea2aac26  cam35_data/cam3_5_11_climo.nc
-c9ecb1cbabcc60196263f0a8b488d1e1  cam35_data/cam3_5_12_climo.nc
-84204a1bc34f41f71ed613278b29a57f  cam35_data_smaller/cam3_5_01_climo.nc
-9fcd1364523a26f4fa833a89fc14bae9  cam35_data_smaller/cam3_5_02_climo.nc
-d53f58834cf9053f3255818e441c735a  cam35_data_smaller/cam3_5_03_climo.nc
-7c848ac7acf21552d93273b0ba4817e5  cam35_data_smaller/cam3_5_04_climo.nc
-96248cd867434a51d160ada6af4d0f4f  cam35_data_smaller/cam3_5_05_climo.nc
-155a163a204538164980a1425f4aa301  cam35_data_smaller/cam3_5_06_climo.nc
-b33bf096521235e9fec1a64479438568  cam35_data_smaller/cam3_5_07_climo.nc
-6fe5fcd5a4221dc4ae711ab6631b9cea  cam35_data_smaller/cam3_5_08_climo.nc
-7f2b52b2807e52ab0cdb94e892cec986  cam35_data_smaller/cam3_5_09_climo.nc
-a5121dec5eb93415d8988fb3ae1f279e  cam35_data_smaller/cam3_5_10_climo.nc
-36183ada10292e09053a6573f0d493b6  cam35_data_smaller/cam3_5_11_climo.nc
-018e37b4e760d92edfafcb035173db3d  cam35_data_smaller/cam3_5_12_climo.nc
-8cd47baae6710a9373ebaba96a6e262b  cam_output/c_t_b30.009.cam2.h0.0600-01.nc
-82731ab10329e5cdacfa78ea3da520f2  cam_output/c_t_b30.009.cam2.h0.0600-02.nc
-146a578b04623773ad0e98e930d1a5e5  cam_output/c_t_b30.009.cam2.h0.0600-03.nc
-e6ce8ea3580b3266bd93fc73dcad9adc  cam_output/c_t_b30.009.cam2.h0.0600-04.nc
-a5698548a26c40c514adcadd9623eb27  cam_output/c_t_b30.009.cam2.h0.0600-05.nc
-848918d62382e94bad56a2cc2cd07fd8  cam_output/c_t_b30.009.cam2.h0.0600-06.nc
-bf447ef80bef314a5e2b2003d741a529  cam_output/c_t_b30.009.cam2.h0.0600-07.nc
-be548db39e7607d4153f73e4b5657aa1  cam_output/c_t_b30.009.cam2.h0.0600-08.nc
-0f7764b3aaf5412bdcd70943129026d6  cam_output/c_t_b30.009.cam2.h0.0600-09.nc
-f0ac64dfbf1e5ccb97a167d0f6c75672  cam_output/c_t_b30.009.cam2.h0.0600-10.nc
-7bf5f3401a0fbe8263bac61ca113e7d8  cam_output/c_t_b30.009.cam2.h0.0600-11.nc
-cf83e939285b29ff808ed41544d7df92  cam_output/c_t_b30.009.cam2.h0.0600-12.nc
-6e8cdaf575f9101921d11c571334842f  cam_output/c_t_b30.009.cam2.h0.0601-01.nc
-999693e6583eb4ed322151b68dda4e72  cam_output/c_t_b30.009.cam2.h0.0601-02.nc
-e6d09f6db4fcf81ce68c935277fb110f  cam_output/c_t_b30.009.cam2.h0.0601-03.nc
-635be9948c7e7cecf82c76f953ed0624  cam_output/c_t_b30.009.cam2.h0.0601-04.nc
-a2c14b3f0602aa9ad3b43316f11ae5ff  cam_output/c_t_b30.009.cam2.h0.0601-05.nc
-fbbb8c51f858fe89f4880a41b5f17d04  cam_output/c_t_b30.009.cam2.h0.0601-06.nc
-1e5b7508a062d6aeb16afbf98045a5de  cam_output/c_t_b30.009.cam2.h0.0601-07.nc
-fc30abee308e251bde7be642fa0c3f7a  cam_output/c_t_b30.009.cam2.h0.0601-08.nc
-beafa07dc0c98b09984fd7830eb99f52  cam_output/c_t_b30.009.cam2.h0.0601-09.nc
-4f36607badf32ee9d2c5234a58e779ad  cam_output/c_t_b30.009.cam2.h0.0601-10.nc
-039b724f844a15b936bfe7ee00e79a6e  cam_output/c_t_b30.009.cam2.h0.0601-11.nc
-da7fb4fcc052983bd7e5ac8a63a6a451  cam_output/c_t_b30.009.cam2.h0.0601-12.nc
-f7a5944e246ca97ec722ed72d2e53315  model_data_12/f.e11.F2000C5.f09_f09.control.001.cam.h0.0001_T_only-01.nc
-c4ad68141d351aea55ce1e9bf0859798  model_data_12/f.e11.F2000C5.f09_f09.control.001.cam.h0.0001_T_only-04.nc
-bf0b2ef03cd280f5e635870b2ccda8d9  model_data_12/f.e11.F2000C5.f09_f09.control.001.cam.h0.0001_T_only-07.nc
-6893d78c8c5541999043f19d2dcee035  model_data_12/f.e11.F2000C5.f09_f09.control.001.cam.h0.0001_T_only-10.nc
-e241fc465279e7126e0e59789d9baedf  obs/NCEP_01_climo.nc
-cd1f8016b4f575c4b2a08a69c78b041a  obs/NCEP_02_climo.nc
-fae4c3bfa51707a9329b274e8de3633e  obs/NCEP_03_climo.nc
-49d418806a382eb17ae1c7cfa5295355  obs/NCEP_04_climo.nc
-97fa9532455053987f1a92645c42ef71  obs/NCEP_05_climo.nc
-078fece9cf0a1730ee13a18211cefa05  obs/NCEP_06_climo.nc
-039a8dd4c98b2e2332699e750f72e2b2  obs/NCEP_07_climo.nc
-4bc14d3447ff3af8c0fec1a19c7cd7b3  obs/NCEP_08_climo.nc
-ef45a99e527f5a36b4a145d9919ac628  obs/NCEP_09_climo.nc
-6673a7bcbf1476015dad7b5106a4213f  obs/NCEP_10_climo.nc
-9e86a777517ad6f4b392f7d63d8e98f7  obs/NCEP_11_climo.nc
-5f4ec5821d1ebb9e5a73c9a46666291a  obs/NCEP_12_climo.nc
-578dcbfb4979cd3cbee2bde42a52d5c7  obs/NCEP_ANN_climo.nc
-78c01194a72dc3da7b25c1ce402dfe7b  obs/NCEP_DJF_climo.nc
-dcd392831c5c0628fde4f92e2f704c18  obs/NCEP_JJA_climo.nc
-185a376e3e6403191d42dbef55b72928  obs_atmos/c_CRU_ANN_climo.nc
-9c754380f93e4305c5ed40b67d7282e5  obs_atmos/c_CRU_DJF_climo.nc
-a8b02bd2ea54d089db13005e7a9b4999  obs_atmos/c_CRU_JJA_climo.nc
-ef18dbf141367c0d7cf3990d7e10d64c  obs_atmos/c_t_NCEP_01_climo.nc
-10c09087712b3b283765381c78002154  obs_atmos/c_t_NCEP_02_climo.nc
-3bcec656166614c11ad1f436129b4922  obs_atmos/c_t_NCEP_03_climo.nc
-bf326d77aceedcdf7197b6ca4d7624df  obs_atmos/c_t_NCEP_04_climo.nc
-631dadd9a88b46a47506fa2b2cc0cc1e  obs_atmos/c_t_NCEP_05_climo.nc
-3b65eb064433b28d9e23aaf260994768  obs_atmos/c_t_NCEP_06_climo.nc
-dd2962224eb21be51dd2e1d38d4d7bfc  obs_atmos/c_t_NCEP_07_climo.nc
-a7f0f0a58959c30f4342a643537d5791  obs_atmos/c_t_NCEP_08_climo.nc
-16f1fb6a6fd60428a24821dfdbf9ba3f  obs_atmos/c_t_NCEP_09_climo.nc
-c1c5580c10e6017d7a1b4c844f4bee95  obs_atmos/c_t_NCEP_10_climo.nc
-58ca74759be8e809e6113309163eb87e  obs_atmos/c_t_NCEP_11_climo.nc
-0a34a591d117471b83ec15d41ca4de5e  obs_atmos/c_t_NCEP_12_climo.nc
-53a07928fd5bb8282e3b00707c30d352  obs_atmos/c_t_NCEP_ANN_climo.nc
-07fbdfe7c5ac96dca4d5b30cf0ffca4d  obs_atmos/c_t_NCEP_DJF_climo.nc
-bba7b95da836594ba56eccc5cc735953  obs_atmos/c_t_NCEP_JJA_climo.nc
-ded2539f0946958f20946211ec6de7c6  obs_data_12/._RAOBS.nc
-2df5c553f24cf4e51a826a34075a6122  obs_data_12/RAOBS.nc
-3057f458f2eea7e29b5df6622b71c5c6  obs_data_13/ISCCPCOSP_01_climo.nc
-863fdc036ca6c8bc181b68934fb5f334  obs_data_13/ISCCPCOSP_02_climo.nc
-44d91325876baa34dd53a3d5fdebc8a5  obs_data_13/ISCCPCOSP_03_climo.nc
-2821ea5e0d7d1ab2e32486e6336c07b5  obs_data_13/ISCCPCOSP_04_climo.nc
-dc5823c8971136e536c1f7c7d8f8452f  obs_data_13/ISCCPCOSP_05_climo.nc
-b0fb19767ddf330a4dd37a429810b9d9  obs_data_13/ISCCPCOSP_06_climo.nc
-a07c2a2e6adfed391c53a0aff0c436ab  obs_data_13/ISCCPCOSP_07_climo.nc
-ca089074a4f3d1fe7f6897c0c88b1b6b  obs_data_13/ISCCPCOSP_08_climo.nc
-9f9c9897dc8e09e18f155fe5355d1ed8  obs_data_13/ISCCPCOSP_09_climo.nc
-d74abae2b663ea67cf95de9b5f4e8485  obs_data_13/ISCCPCOSP_10_climo.nc
-ba01b312ad7fc2f936299798c963114c  obs_data_13/ISCCPCOSP_11_climo.nc
-0a20a6f6220e941ad84e75347d044ff0  obs_data_13/ISCCPCOSP_12_climo.nc
-f422c02f76cfd8ffdc3d664f7df29fa5  obs_data_13/ISCCPCOSP_ANN_climo.nc
-c0c6e18ef0202b8da755210ff5bab6d0  obs_data_13/ISCCPCOSP_DJF_climo.nc
-a52e9a734e34d3b6198f836c407a834b  obs_data_13/ISCCPCOSP_JJA_climo.nc
-0692a353d71f86e3b008f5b7136fead4  obs_data_13/ISCCPCOSP_MAM_climo.nc
-65790f602a139f5e7ac561c0f50073a6  obs_data_13/ISCCPCOSP_SON_climo.nc
-25da719f4a94f073b344d463ef46dd5c  obs_data_5.6/ERS_01_climo.nc
-82938151479416212514ea92f5c8944d  obs_data_5.6/ERS_02_climo.nc
-4474e171bc3ed010bc4cf85f2156331c  obs_data_5.6/ERS_03_climo.nc
-5928149aaa7e20e8e021051e4c1cf8af  obs_data_5.6/ERS_04_climo.nc
-8ba71cabf16409ec359250137313e1fc  obs_data_5.6/ERS_05_climo.nc
-7173b6c6ad21ebba3faae364bb0e2abd  obs_data_5.6/ERS_06_climo.nc
-4a4dce6ec29ff746e6ca438a1144e2f9  obs_data_5.6/ERS_07_climo.nc
-89b82d69760e786d4c5cd6007e67ad8e  obs_data_5.6/ERS_08_climo.nc
-703d8a3c2bca30d721db74e4a9607991  obs_data_5.6/ERS_09_climo.nc
-6be5b6eaacbd4bfee413b0432a3822bd  obs_data_5.6/ERS_10_climo.nc
-3aab5e306b45952d4bc538cf09733d36  obs_data_5.6/ERS_11_climo.nc
-b7d52d062f54e6c28b73c1630866eb8f  obs_data_5.6/ERS_12_climo.nc
-257874570e3aeeda6cbd55accf60f6c9  obs_data_5.6/ERS_ANN_climo.nc
-d7fc6bbb9a2dfdb0fa44d7835f94a3d4  obs_data_5.6/ERS_DJF_climo.nc
-3cce9af23687f27d3b134f60039ebdce  obs_data_5.6/ERS_JJA_climo.nc
-aaedba911f145e711d05b6430e13ce4e  obs_data_5.6/ERS_MAM_climo.nc
-e40f05dfec15f145e9623290d5142705  obs_data_5.6/ERS_SON_climo.nc
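
Note: the block above is the deleted md5 manifest for the uvcmetrics test data, in md5sum layout ("<md5>  <relative path>"). A minimal sketch of verifying files against such a manifest, assuming only that layout; the chunked read keeps memory flat for large .nc files:

    import hashlib
    import os

    def verify_manifest(manifest_path, base_dir="."):
        """Return the relative paths whose md5 does not match the manifest."""
        bad = []
        with open(manifest_path) as manifest:
            for line in manifest:
                line = line.strip()
                if not line:
                    continue
                expected, relpath = line.split(None, 1)
                digest = hashlib.md5()
                with open(os.path.join(base_dir, relpath), "rb") as f:
                    for chunk in iter(lambda: f.read(1 << 20), b""):
                        digest.update(chunk)
                if digest.hexdigest() != expected:
                    bad.append(relpath)
        return bad
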
diff --git a/CMake/cdat_modules_extra/vacumm_build_step.cmake.in b/CMake/cdat_modules_extra/vacumm_build_step.cmake.in
deleted file mode 100644
index 8ef121b843a7bddd4d5de80fa0ae6411bacf9f79..0000000000000000000000000000000000000000
--- a/CMake/cdat_modules_extra/vacumm_build_step.cmake.in
+++ /dev/null
@@ -1,6 +0,0 @@
-include(@cdat_CMAKE_BINARY_DIR@/cdat_common_environment.cmake)
-
-execute_process(
-  COMMAND "@PYTHON_EXECUTABLE@" setup.py install @PYTHON_EXTRA_PREFIX@
-  WORKING_DIRECTORY "@vacumm_source_dir@"
-  )
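
Note: the removed vacumm step is the smallest instance of the superbuild pattern: substitute a few @...@ variables, then run "setup.py install" in the package source tree. A sketch of the same step in plain Python, with placeholder values standing in for the CMake substitutions:

    import subprocess

    python_exe = "/usr/bin/python"            # placeholder for @PYTHON_EXECUTABLE@
    extra_prefix = ["--prefix", "/opt/cdat"]  # placeholder for @PYTHON_EXTRA_PREFIX@
    source_dir = "/path/to/vacumm"            # placeholder for @vacumm_source_dir@

    # Run the install in the source tree and raise if it fails.
    subprocess.check_call([python_exe, "setup.py", "install"] + extra_prefix,
                          cwd=source_dir)
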
diff --git a/CMake/cdat_modules_extra/vtk_install_python_module.cmake.in b/CMake/cdat_modules_extra/vtk_install_python_module.cmake.in
deleted file mode 100644
index 9d7ba552bf5c112539857caef20973063da54495..0000000000000000000000000000000000000000
--- a/CMake/cdat_modules_extra/vtk_install_python_module.cmake.in
+++ /dev/null
@@ -1,36 +0,0 @@
-
-set(ENV${CC} "@CMAKE_C_COMPILER@")
-set(ENV${CXX} "@CMAKE_CXX_COMPILER@")
-set(ENV${CPP} "@CMAKE_CXX_COMPILER@")
-
-set(ENV{@LIBRARY_PATH@} "@cdat_EXTERNALS@/lib:$ENV{@LIBRARY_PATH@}")
-set(ENV{LDFLAGS} "-L@cdat_EXTERNALS@/lib")
-set(ENV{CFLAGS} "@cdat_osx_flags@ -I@cdat_EXTERNALS@/include -I@cdat_EXTERNALS@/include/freetype2 @ADDITIONAL_CFLAGS@")
-set(ENV{CPPFLAGS} "@cdat_osx_cppflags@ -I@cdat_EXTERNALS@/include -I@cdat_EXTERNALS@/include/freetype2 @ADDITIONAL_CPPFLAGS@")
-set(ENV{CXXFLAGS} "@cdat_osx_cxxflags@ -I@cdat_EXTERNALS@/include -I@cdat_EXTERNALS@/include/freetype2 @ADDITIONAL_CXXFLAGS@")
-
-set(ENV{EXTERNALS} "@cdat_EXTERNALS@")
-
-execute_process(
-  COMMAND env PYTHONPATH=@PYTHONPATH@ "@PYTHON_EXECUTABLE@" setup.py install @PYTHON_EXTRA_PREFIX@
-  WORKING_DIRECTORY @cdat_BINARY_DIR@/build/ParaView-build/VTK/Wrapping/Python
-  RESULT_VARIABLE res)
-
-if(NOT ${res} EQUAL 0)
-  message("Make Errors detected: \n${CDAT_OUT}\n${CDAT_ERR}")
-  message(FATAL_ERROR "Error in VTK Python Install")
-endif()
-
-if (APPLE)
- message("We are on a Mac, need to relink all libraries")
- execute_process(
-  COMMAND env PYTHONPATH=@PYTHONPATH@ "@PYTHON_EXECUTABLE@" @cdat_CMAKE_SOURCE_DIR@/fixlink.py
-  WORKING_DIRECTORY @cdat_BINARY_DIR@
-  OUTPUT_VARIABLE out
-  ERROR_VARIABLE err
-  RESULT_VARIABLE res)
- message("got: "${res})
-endif ()
-
-message("Install succeeded.")
-
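
Note: the removed VTK install step layered three things: a compiler/flags environment pointed at the Externals tree, an error check on the setup.py result, and a macOS relink pass via fixlink.py. (The set(ENV${CC} ...) spelling in the deleted file looks like a typo for set(ENV{CC} ...); as written it sets an ordinary CMake variable named ENV, not the environment.) A sketch of the environment-plus-error-check part, with placeholder paths for the @...@ substitutions:

    import os
    import subprocess
    import sys

    externals = "/opt/cdat/Externals"   # placeholder for @cdat_EXTERNALS@
    env = dict(os.environ,
               CC="cc", CXX="c++",
               LDFLAGS="-L{0}/lib".format(externals),
               CFLAGS="-I{0}/include -I{0}/include/freetype2".format(externals))

    # Equivalent of execute_process(... RESULT_VARIABLE res) plus the check.
    result = subprocess.call(["python", "setup.py", "install"],
                             cwd="/path/to/VTK/Wrapping/Python", env=env)
    if result != 0:
        sys.exit("Error in VTK Python Install")
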
diff --git a/CMake/cdat_modules_extra/xgks_configure_step.cmake.in b/CMake/cdat_modules_extra/xgks_configure_step.cmake.in
deleted file mode 100644
index 0abcb5c96ac60b287fbc85c986667834336be49f..0000000000000000000000000000000000000000
--- a/CMake/cdat_modules_extra/xgks_configure_step.cmake.in
+++ /dev/null
@@ -1,13 +0,0 @@
-
-include(@cdat_CMAKE_BINARY_DIR@/cdat_common_environment.cmake)
-
-execute_process(
-  COMMAND env FC="" sh configure --prefix=${INSTALL_DIR} ${CONFIGURE_ARGS}
-  WORKING_DIRECTORY "${WORKING_DIR}"
-  RESULT_VARIABLE res)
-
-if(NOT ${res} EQUAL 0)
-  message("Config Errors detected: \n${CDAT_OUT}\n${CDAT_ERR}")
-  message(FATAL_ERROR "Error in config")
-endif()
-message("Config succeeded.")
diff --git a/CMake/curses_gcc5.patch b/CMake/curses_gcc5.patch
deleted file mode 100644
index a1ee0240b8d43944ad9b0b48ae37bbc76dd52261..0000000000000000000000000000000000000000
--- a/CMake/curses_gcc5.patch
+++ /dev/null
@@ -1,30 +0,0 @@
-index d8cc3c9..b91398c 100755
---- a/ncurses/base/MKlib_gen.sh
-+++ b/ncurses/base/MKlib_gen.sh
-@@ -491,11 +492,22 @@ sed -n -f $ED1 \
-	-e 's/gen_$//' \
-	-e 's/  / /g' >>$TMP
-
-+cat >$ED1 <<EOF
-+s/  / /g
-+s/^ //
-+s/ $//
-+s/P_NCURSES_BOOL/NCURSES_BOOL/g
-+EOF
-+
-+# A patch discussed here:
-+#	https://gcc.gnu.org/ml/gcc-patches/2014-06/msg02185.html
-+# introduces spurious #line markers.  Work around that by ignoring the system's
-+# attempt to define "bool" and using our own symbol here.
-+sed -e 's/bool/P_NCURSES_BOOL/g' $TMP > $ED2
-+cat $ED2 >$TMP
-+
- $preprocessor $TMP 2>/dev/null \
--| sed \
--	-e 's/  / /g' \
--	-e 's/^ //' \
--	-e 's/_Bool/NCURSES_BOOL/g' \
-+| sed -f $ED1 \
- | $AWK -f $AW2 \
- | sed -f $ED3 \
- | sed \
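
Note: the deleted ncurses patch works around a gcc 5 preprocessor change by protecting the `bool` token before preprocessing and mapping the protected name to NCURSES_BOOL afterwards. A toy round-trip showing the protect/restore idea (the real patch does this with sed inside MKlib_gen.sh):

    import re

    def protect(source):
        # Rename the token before the preprocessor can mangle it.
        return re.sub(r"\bbool\b", "P_NCURSES_BOOL", source)

    def restore(preprocessed):
        # Map the protected token to the name the generated code expects.
        return preprocessed.replace("P_NCURSES_BOOL", "NCURSES_BOOL")

    src = "typedef bool NCURSES_BOOL;"
    assert restore(protect(src)) == "typedef NCURSES_BOOL NCURSES_BOOL;"
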
diff --git a/CMake/dummy.f90 b/CMake/dummy.f90
deleted file mode 100644
index 4bbd9fbdc3f439ef6194449c1befe8b54e21e35e..0000000000000000000000000000000000000000
--- a/CMake/dummy.f90
+++ /dev/null
@@ -1,4 +0,0 @@
-        PROGRAM dummy
-
-        print*, "Hi"
-        END
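
Note: dummy.f90 existed only so the build could smoke-test the Fortran toolchain (compare the check_fortran include kept in CMakeLists.txt). An equivalent probe in Python 3, assuming gfortran is on PATH:

    import os
    import subprocess
    import tempfile

    SRC = '        PROGRAM dummy\n\n        print*, "Hi"\n        END\n'

    with tempfile.TemporaryDirectory() as tmp:
        f90 = os.path.join(tmp, "dummy.f90")
        exe = os.path.join(tmp, "dummy")
        with open(f90, "w") as f:
            f.write(SRC)
        subprocess.check_call(["gfortran", f90, "-o", exe])  # does it compile?
        subprocess.check_call([exe])                         # does it run?
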
diff --git a/CMake/fixName.py b/CMake/fixName.py
deleted file mode 100644
index 17f2a060670d6c5761e73bc67553ea147b6dd120..0000000000000000000000000000000000000000
--- a/CMake/fixName.py
+++ /dev/null
@@ -1,15 +0,0 @@
-#!/usr/bin/env python
-
-
-import sys
-
-fnm = sys.prefix+"/Resources/Python.app/Contents/Info.plist"
-
-f=open(fnm)
-s=f.read()
-pat="<key>CFBundleName</key>"
-i=s.find(pat)#<string>Python</string>")
-s2=s[:i+len(pat)]+s[i+len(pat):].replace("Python","UV-CDAT",1)
-f=open(fnm,'w')
-f.write(s2)
-f.close()
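
Note: fixName.py rebranded the framework Python's menu-bar name by splicing Info.plist as raw text. The standard library can make the same edit structurally; a sketch (only meaningful inside a framework Python install, and Python 3.4+ for plistlib.load/dump):

    import plistlib
    import sys

    plist_path = sys.prefix + "/Resources/Python.app/Contents/Info.plist"
    with open(plist_path, "rb") as f:
        info = plistlib.load(f)
    if info.get("CFBundleName") == "Python":
        info["CFBundleName"] = "UV-CDAT"
        with open(plist_path, "wb") as f:
            plistlib.dump(info, f)
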
diff --git a/CMake/fix_install_name.py.in b/CMake/fix_install_name.py.in
deleted file mode 100644
index 787490a48e735b314984e7c75c52c0675b88b130..0000000000000000000000000000000000000000
--- a/CMake/fix_install_name.py.in
+++ /dev/null
@@ -1,33 +0,0 @@
-import os
-import sys
-import fnmatch
-
-def find_files(directory, pattern):
-    for root, dirs, files in os.walk(directory):
-        for basename in files:
-            if fnmatch.fnmatch(basename, pattern):
-                filename = os.path.join(root, basename)
-                yield filename
-
-exts = ['*.dylib', '*.so']
-
-# Find all the modules and shared libraries and replace the path contained 
-# if referencing the built ones
-for pattern in exts:
-  for library in find_files("./", pattern):
-    print library
-    cmd = 'otool -L %s' % library
-    print "library is", library
-    deps = os.popen(cmd).readlines()
-    for dep in deps[1:]:
-      dep_name = os.path.split(dep)[1]
-      dep_name = dep_name.split()[0]
-      dep = dep.split()[0]
-      # Replace the ones that are built by us
-      if fnmatch.fnmatch(dep_name, pattern) and fnmatch.fnmatch(dep, "@CMAKE_INSTALL_PREFIX@*"):
-        print 'dep fullpath ', dep
-        print 'dep name', dep_name
-        cmd = "install_name_tool -change %s %s %s" % (dep, "@rpath/"+"".join(dep_name), library)
-        print 'change cmd is ', cmd
-        lns = os.popen(cmd)
-        print "\t"+"".join(lns)
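
Note: fix_install_name.py walks every .dylib/.so, reads its load commands with `otool -L`, and rewrites any dependency under the install prefix to an @rpath/ reference with install_name_tool. The core loop in Python 3 subprocess form (the prefix is a placeholder for @CMAKE_INSTALL_PREFIX@):

    import os
    import subprocess

    PREFIX = "/opt/cdat"   # placeholder for @CMAKE_INSTALL_PREFIX@

    def rewrite_deps(library):
        out = subprocess.check_output(["otool", "-L", library], text=True)
        for line in out.splitlines()[1:]:       # first line names the library itself
            dep = line.strip().split()[0]
            if dep.startswith(PREFIX):
                name = os.path.basename(dep)
                subprocess.check_call(["install_name_tool", "-change",
                                       dep, "@rpath/" + name, library])
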
diff --git a/CMake/fixlink.py b/CMake/fixlink.py
deleted file mode 100644
index 808baa7f2d4f7ff422ded276ccfde2a34ee94fec..0000000000000000000000000000000000000000
--- a/CMake/fixlink.py
+++ /dev/null
@@ -1,49 +0,0 @@
-import os,sys,numpy
-lib = '/usr/local/uvcdat/1.0.alpha/Library/Frameworks/Python.framework/Versions/2.7/lib/python2.7/site-packages/paraview/vtk/vtkCommonPython.so'
-bad = 'ParaView-build'
-#bad="System"
-#bad="paraview3.11"
-def change(lib,bad,paraviewPath,sameDir=False):
-    cmd = 'otool -L %s' % lib
-    print "LIB:",lib
-    ln=os.popen(cmd).readlines()
-    for l in ln[1:]:
-        link = l.strip().split()[0]
-        if link.find(bad)>-1:
-            print link,"\t",
-            nm=os.path.split(link)[1]
-            print nm
-            cmd = "install_name_tool -change %s %s/%s %s" % (link,paraviewPath,nm,lib)
-            print "\t",cmd
-            lns = os.popen(cmd)
-            print "\t"+"".join(lns)
-        if sameDir:
-            if link[:6] in ["libvtk","libXdm","libKWC","libQVT","libVPI","libCos","libpro"]:
-                cmd = "install_name_tool -change %s %s/%s %s" % (link,paraviewPath,link,lib)
-                print "\t",cmd
-                lns = os.popen(cmd)
-                print "\t"+"".join(lns)
-            
-
-inpath =  "/".join(numpy.__path__[0].split("/")[:-1]+["paraview",])
-inpath2 =  "/".join(numpy.__path__[0].split("/")[:-1]+["paraview","vtk"])
-inpath3 =  "/".join(numpy.__path__[0].split("/")[:-1]+["vtk"])
-inpath4 =  "/".join(numpy.__path__[0].split("/")[:-1]+["ParaView-3.11.1-py2.7.egg","paraview",])
-inpath5 =  "/".join(numpy.__path__[0].split("/")[:-1]+["ParaView-3.11.1-py2.7.egg","paraview","vtk"])
-inpath6 =  "/".join(numpy.__path__[0].split("/")[:-1]+["VTK-5.9.0-py2.7.egg","vtk"])
-paraviewPath = "/".join(sys.prefix.split("/")[:-5]+["Externals","lib","paraview-3.11"]) #= '/usr/local/uvcdat/1.0.alpha/Externals/lib/paraview-3.11/'
-def doPath(inpath,paraviewPath,sameDir=False):
-    files = os.popen("ls %s" % inpath).readlines()
-    for f in files:
-        lib = inpath+"/"+f.strip()
-        print lib
-        change(lib,bad,paraviewPath,sameDir)
-doPath(inpath,paraviewPath)
-doPath(inpath2,paraviewPath)
-doPath(inpath3,paraviewPath)
-doPath(inpath4,paraviewPath)
-doPath(inpath5,paraviewPath)
-doPath(inpath6,paraviewPath)
-doPath(paraviewPath,paraviewPath,True)
-
-
diff --git a/CMake/install.py b/CMake/install.py
deleted file mode 100644
index 2e37d9791e0c76231c3d77d388a15d2c3468a862..0000000000000000000000000000000000000000
--- a/CMake/install.py
+++ /dev/null
@@ -1,10 +0,0 @@
-import os
-
-# The main installation script is installation/install.py
-# However, we need to first check for problems using 1.5.2 syntax only.
-current_dir = os.path.dirname(__file__)
-
-execfile(os.path.join(current_dir, 'test_python_ok.py'))
-
-install_script_path = os.path.join(current_dir, '..', 'installation', 'install.py')
-execfile(install_script_path)
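
Note: this bootstrap deliberately stays valid under Python 1.5.2 syntax so the version check in test_python_ok.py can run before anything newer is parsed; execfile is the Python 2 spelling. The same chaining in Python 3, for reference:

    import os

    current_dir = os.path.dirname(__file__)

    def run_script(path):
        # Python 3 replacement for execfile(path).
        with open(path) as f:
            exec(compile(f.read(), path, "exec"), {"__file__": path})

    run_script(os.path.join(current_dir, "test_python_ok.py"))
    run_script(os.path.join(current_dir, "..", "installation", "install.py"))
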
diff --git a/CMake/netcdf_clang.patch b/CMake/netcdf_clang.patch
deleted file mode 100644
index 44729cf6ef0ff4e4684b35a240ce169188e87920..0000000000000000000000000000000000000000
--- a/CMake/netcdf_clang.patch
+++ /dev/null
@@ -1,12 +0,0 @@
-diff --git a/ncgen3/load.c b/git/uvcdat/exsrc/ncgen3_load.c
-index 3da4712..147f4e7 100644
---- a/ncgen3/load.c
-+++ b/git/uvcdat/exsrc/ncgen3_load.c
-@@ -5,6 +5,7 @@
-  *********************************************************************/
- 
- #include <stdio.h>
-+#include <config.h>
- #include <stdlib.h>
- #include <string.h>
- #include <ctype.h>
diff --git a/CMake/pyspharm_setup.patch b/CMake/pyspharm_setup.patch
deleted file mode 100644
index da643719824e223396593ec14740d0639deffd70..0000000000000000000000000000000000000000
--- a/CMake/pyspharm_setup.patch
+++ /dev/null
@@ -1,19 +0,0 @@
---- setup.py.ok	2015-07-28 15:37:07.000000000 -0700
-+++ setup.py	2015-07-28 15:53:10.000000000 -0700
-@@ -27,12 +27,12 @@
-  the terms of the SPHEREPACK license at
-  http://www2.cisl.ucar.edu/resources/legacy/spherepack/license\n
-  """)
--    download = raw_input('Do you want to download SPHEREPACK now? (yes or no)')
--    if download not in ['Y','y','yes','Yes','YES']:
--        sys.exit(0)
-+    # download = raw_input('Do you want to download SPHEREPACK now? (yes or no)')
-+    # if download not in ['Y','y','yes','Yes','YES']:
-+    #     sys.exit(0)
-     import urllib, tarfile
-     tarfname = 'spherepack3.2.tar'
--    URL="https://www2.cisl.ucar.edu/sites/default/files/"+tarfname
-+    URL="http://uvcdat.llnl.gov/cdat/resources/"+tarfname
-     urllib.urlretrieve(URL,tarfname)
-     if not os.path.isfile(tarfname):
-         raise IOError('Sorry, download failed')
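
Note: the pyspharm patch makes the SPHEREPACK fetch non-interactive and points it at an LLNL mirror. The patched flow, in the Python 3 spelling of the same urllib call:

    import os
    import urllib.request

    tarfname = "spherepack3.2.tar"
    URL = "http://uvcdat.llnl.gov/cdat/resources/" + tarfname
    urllib.request.urlretrieve(URL, tarfname)
    if not os.path.isfile(tarfname):
        raise IOError("Sorry, download failed")
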
diff --git a/CMake/python_patch_step.cmake.in b/CMake/python_patch_step.cmake.in
deleted file mode 100644
index c1cb47384b579e6df66865e1f6b0a135b2d87266..0000000000000000000000000000000000000000
--- a/CMake/python_patch_step.cmake.in
+++ /dev/null
@@ -1,15 +0,0 @@
-execute_process(
-  COMMAND
-  "@CMAKE_COMMAND@" -E copy_if_different @cdat_SOURCE_DIR@/pysrc/src/setup-@PYTHON_VERSION@.py @python_SOURCE_DIR@/setup.py
-)
-
-# Refer: http://bugs.python.org/issue14572
-if(NOT WIN32)
-  execute_process(
-    WORKING_DIRECTORY @python_SOURCE_DIR@
-    COMMAND patch -p1
-    INPUT_FILE @cdat_CMAKE_SOURCE_DIR@/sqlite3_int64_v2.patch
-  )
-endif()
-
-
diff --git a/CMake/sqlite3_int64_v2.patch b/CMake/sqlite3_int64_v2.patch
deleted file mode 100644
index 3a3ab31d67de2666bca40294b26d33c25667e58c..0000000000000000000000000000000000000000
--- a/CMake/sqlite3_int64_v2.patch
+++ /dev/null
@@ -1,24 +0,0 @@
-# HG changeset patch
-# Parent 4641d8d99a7dd56c76aa7f769d6d91499113a3b8
-
-diff --git a/Modules/_sqlite/connection.c b/Modules/_sqlite/connection.c
---- a/Modules/_sqlite/connection.c
-+++ b/Modules/_sqlite/connection.c
-@@ -549,7 +549,7 @@
-     } else if (py_val == Py_None) {
-         sqlite3_result_null(context);
-     } else if (PyInt_Check(py_val)) {
--        sqlite3_result_int64(context, (sqlite3_int64)PyInt_AsLong(py_val));
-+        sqlite3_result_int64(context, (sqlite_int64)PyInt_AsLong(py_val));
-     } else if (PyLong_Check(py_val)) {
-         sqlite3_result_int64(context, PyLong_AsLongLong(py_val));
-     } else if (PyFloat_Check(py_val)) {
-@@ -580,7 +580,7 @@
-     sqlite3_value* cur_value;
-     PyObject* cur_py_value;
-     const char* val_str;
--    sqlite3_int64 val_int;
-+    sqlite_int64 val_int;
-     Py_ssize_t buflen;
-     void* raw_buffer;
- 
diff --git a/CMake/test_python_ok.py b/CMake/test_python_ok.py
deleted file mode 100644
index 274e15ac97072450ec2584edee90cdb25b95fcda..0000000000000000000000000000000000000000
--- a/CMake/test_python_ok.py
+++ /dev/null
@@ -1,19 +0,0 @@
-import sys, os
-# The main installation script is installation/install.py
-# However, we need to first check for problems using 1.5.2 syntax only.
-trouble = 0
-minimum_python_version = (2,5,0,'final',0)
-if not hasattr(sys, 'version_info') or sys.version_info < minimum_python_version:
-    sys.stderr.write("Your Python is too old; please see README.txt.\n")
-    trouble = 1
-for x in ["PYTHONHOME"]:
-    if os.environ.has_key(x):
-        sys.stderr.write('Please undefine ' + x + ' before installation.\n')
-        trouble = 1
-if not os.environ.has_key('HOME'):
-    sys.stderr.write(\
-"Caution: You'll need to set environment variable HOME before using CDAT.\n")
-
-if trouble:
-    raise SystemExit, 1
-print 'Your Python checked OK!'
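
Note: the preflight check uses only syntax very old interpreters can parse: has_key, the `raise SystemExit, 1` statement form, and a tuple compare on sys.version_info. The same checks in Python 3:

    import os
    import sys

    trouble = False
    if sys.version_info < (2, 5):
        sys.stderr.write("Your Python is too old; please see README.txt.\n")
        trouble = True
    if "PYTHONHOME" in os.environ:
        sys.stderr.write("Please undefine PYTHONHOME before installation.\n")
        trouble = True
    if "HOME" not in os.environ:
        sys.stderr.write(
            "Caution: You'll need to set environment variable HOME before using CDAT.\n")
    if trouble:
        raise SystemExit(1)
    print("Your Python checked OK!")
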
diff --git a/CMake/travis_build.cmake b/CMake/travis_build.cmake
deleted file mode 100644
index 83c8214350bf12f45e748fdd3d3feee211da07ba..0000000000000000000000000000000000000000
--- a/CMake/travis_build.cmake
+++ /dev/null
@@ -1,18 +0,0 @@
-set(CTEST_SOURCE_DIRECTORY "$ENV{TRAVIS_BUILD_DIR}")
-set(CTEST_BINARY_DIRECTORY "$ENV{TRAVIS_BUILD_DIR}/../_build")
-
-include(${CTEST_SOURCE_DIRECTORY}/CTestConfig.cmake)
-set(CTEST_SITE "Travis")
-set(CTEST_BUILD_NAME "Linux-$ENV{TRAVIS_BRANCH}")
-set(CTEST_CMAKE_GENERATOR "Unix Makefiles")
-
-ctest_start("Continuous")
-ctest_configure()
-ctest_build()
-#ctest_test(INCLUDE vcs PARALLEL_LEVEL 1 RETURN_VALUE res)
-#ctest_coverage()
-#file(REMOVE ${CTEST_BINARY_DIRECTORY}/coverage.xml)
-
-#if(NOT res EQUAL 0)
-#  message(FATAL_ERROR "Test failures occurred.")
-#endif()
diff --git a/CMake/travis_submit.cmake b/CMake/travis_submit.cmake
deleted file mode 100644
index 285e876e5c780c34dd0a90024ffce9d998c0ece0..0000000000000000000000000000000000000000
--- a/CMake/travis_submit.cmake
+++ /dev/null
@@ -1,10 +0,0 @@
-set(CTEST_SOURCE_DIRECTORY "$ENV{TRAVIS_BUILD_DIR}")
-set(CTEST_BINARY_DIRECTORY "$ENV{TRAVIS_BUILD_DIR}/../_build")
-
-include(${CTEST_SOURCE_DIRECTORY}/CTestConfig.cmake)
-set(CTEST_SITE "Travis")
-set(CTEST_BUILD_NAME "Linux-$ENV{TRAVIS_BRANCH}")
-set(CTEST_CMAKE_GENERATOR "Unix Makefiles")
-
-ctest_start("Continuous")
-ctest_submit()
diff --git a/CMake/uvcdat.plist b/CMake/uvcdat.plist
deleted file mode 100644
index 496982c351d7837bf99eb1ddb2f681ff9c636805..0000000000000000000000000000000000000000
--- a/CMake/uvcdat.plist
+++ /dev/null
@@ -1,38 +0,0 @@
-<?xml version="1.0" encoding="UTF-8"?>
-<!DOCTYPE plist PUBLIC "-//Apple Computer//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">
-<plist version="1.0">
-<dict>
-	<key>CFBundleDevelopmentRegion</key>
-	<string>English</string>
-	<key>CFBundleExecutable</key>
-	<string>uvcdat</string>
-	<key>CFBundleGetInfoString</key>
-	<string></string>
-	<key>CFBundleIconFile</key>
-	<string>uvcdat.icns</string>
-	<key>CFBundleIdentifier</key>
-	<string></string>
-	<key>CFBundleInfoDictionaryVersion</key>
-	<string>6.0</string>
-	<key>CFBundleLongVersionString</key>
-	<string></string>
-	<key>CFBundleName</key>
-	<string>UVCDAT</string>
-	<key>CFBundlePackageType</key>
-	<string>APPL</string>
-	<key>CFBundleShortVersionString</key>
-	<string></string>
-	<key>CFBundleSignature</key>
-	<string>????</string>
-	<key>CFBundleVersion</key>
-	<string></string>
-	<key>CSResourcesFileMapped</key>
-	<true/>
-	<key>LSRequiresCarbon</key>
-	<true/>
-  <key>LSUIElement</key>
-  <string>1</string>
-	<key>NSHumanReadableCopyright</key>
-	<string></string>
-</dict>
-</plist>
diff --git a/CMakeLists.txt b/CMakeLists.txt
index 32b16d45335864e2cb2cd521e30267655879ff60..bd7d1fa591fce4952ec29902a16b1a1a824634c9 100644
--- a/CMakeLists.txt
+++ b/CMakeLists.txt
@@ -1,12 +1,4 @@
-#=============================================================================
 cmake_minimum_required(VERSION 2.8.8 FATAL_ERROR)
-CMAKE_POLICY(SET CMP0012 NEW)
-
-if ("${CMAKE_VERSION}" VERSION_LESS "2.8.12")
-  message(WARNING "Your CMake version is ${CMAKE_VERSION} which is depreciated for UV-CDAT. The recommended minimum CMake version is 2.8.12. Using older versions can result in build errors particularly with Xcode 5")
-endif()
-
-# Project name and initial checks
 #=============================================================================
 project(cdat)
 
@@ -19,15 +11,6 @@ set(cdat_external_patch_dir ${cdat_SOURCE_DIR}/exsrc)
 
 
 
-if("${CMAKE_INSTALL_PREFIX}" STREQUAL "/usr/local")
-  get_filename_component(cdat_ROOT_DIR ${cdat_BINARY_DIR} PATH)
-  set(CMAKE_INSTALL_PREFIX ${cdat_BINARY_DIR}/install CACHE STRING "" FORCE)
-endif()
-
-set(cdat_EXTERNALS ${CMAKE_INSTALL_PREFIX}/Externals)
-set(ENV{PATH} "${cdat_EXTERNALS}/bin:$ENV{PATH}")
-message("[INFO] We reset your path to: " $ENV{PATH})
-
 set(CMAKE_MODULE_PATH
   ${cdat_CMAKE_SOURCE_DIR}
   ${cdat_CMAKE_SOURCE_DIR}/cmake_modules
@@ -36,6 +19,18 @@ set(CMAKE_MODULE_PATH
   ${CMAKE_MODULE_PATH}
 )
 
+find_program(CONDA NAMES conda)
+
+if ( ${CONDA} STREQUAL "CONDA-NOTFOUND" ) 
+    message(FATAL_ERROR "Could not locate conda; please make sure conda is installed and in your PATH")
+endif()
+
+find_program(ACTIVATE NAMES activate)
+if ( ${ACTIVATE} STREQUAL "ACTIVATE-NOTFOUND" ) 
+    message(FATAL_ERROR "Could not locate activate; please make sure conda is installed and in your PATH")
+endif()
+message("[INFO] Found conda at: ${CONDA}")
+
 if (DEFINED ENV{UVCDAT_ANONYMOUS_LOG})
   if (($ENV{UVCDAT_ANONYMOUS_LOG} STREQUAL "yes") OR
       ($ENV{UVCDAT_ANONYMOUS_LOG} STREQUAL "no"))
@@ -49,64 +44,12 @@ else()
   set(ENV{UVCDAT_ANONYMOUS_LOG} "no")
 endif()
 
-# Disable in source build of any kind.
-#=============================================================================
-include(CheckBuildOutOfSource)
-check_build_out_of_source("${cdat_SOURCE_DIR}" "${cdat_BINARY_DIR}"
-                          BUILDINSOURCE)
-if(BUILDINSOURCE)
-   set(msg "[ERROR] CDAT requires an out of source Build.")
-   set(msg "${msg}\nRun 'git clean -dfx' to restore source dir.")
-   message(FATAL_ERROR "${msg}")
-endif()
-
 # Include useful cmake scripts
 #=============================================================================
 include(cmake_utils)
 include(check_fortran)
 include(CTest)
 
-# Enable/Disable coverage
-#=============================================================================
-option(CDAT_MEASURE_COVERAGE "Measure test coverage while running tests" OFF)
-
-if(CDAT_MEASURE_COVERAGE)
-  message("Coverage measurement enabled; tests will run slower.")
-  set(COVERAGE_PKGS "cdms2,vcs,cdutil,genutil,DV3D,vcsaddons,vcs.vtk_ui,vcs.editors,vcs.vcsvtk,regrid2")
-  configure_file(${cdat_CMAKE_SOURCE_DIR}/coverage_report.py.in
-    ${CMAKE_INSTALL_PREFIX}/bin/coverage_report
-    @ONLY
-  )
-endif()
-
-# Set up the test data. If UVCDAT_USE_SYSTEM_TESTDATA is ON and UVCDAT_TESTDATA
-# is not set then we won't use it for testing. Otherwise we'll test either
-# with the system test data or download it ourselves.
-#=============================================================================
-if (BUILD_TESTING)
-  set(UVCDAT_USE_SYSTEM_TESTDATA ON CACHE BOOL "Use UV-CDAT's test data from the system")
-  if(UVCDAT_USE_SYSTEM_TESTDATA)
-    set(UVCDAT_TESTDATA "" CACHE PATH "Location of UV-CDAT test data")
-    set(UVCDAT_TESTDATA_LOCATION ${UVCDAT_TESTDATA})
-  else()
-    set(UVCDAT_TestData_GZ boonth-1-22-2013.p94m.tar.gz)
-    set(UVCDAT_TestData_MD5 cf47adb0b6164997fb122ccbc3bd6f92)
-    file(DOWNLOAD ${LLNL_URL}/${UVCDAT_TestData_GZ} ${CMAKE_BINARY_DIR}/${UVCDAT_TestData_GZ}
-      STATUS testdatastatus SHOW_PROGRESS EXPECTED_MD5 ${UVCDAT_TestData_MD5})
-    list(GET testdatastatus 0 actualtestdatastatus)
-    if(actualtestdatastatus)
-      message("[WARNING] Unable to automatically download test data ${testdatastatus}")
-    else()
-      set(UVCDAT_TESTDATA_DIR ${CMAKE_BINARY_DIR}/UVCDAT_TestData)
-      file(MAKE_DIRECTORY ${UVCDAT_TESTDATA_DIR})
-      execute_process(
-        COMMAND ${CMAKE_COMMAND} -E tar xzf ${CMAKE_BINARY_DIR}/${UVCDAT_TestData_GZ}
-        WORKING_DIRECTORY ${UVCDAT_TESTDATA_DIR})
-      set(UVCDAT_TESTDATA_LOCATION ${UVCDAT_TESTDATA_DIR})
-    endif()
-  endif()
-endif()
-
 # Change architecture *before* any enable_language() or project()
 # calls so that it's set properly to detect 64-bit-ness...
 #-----------------------------------------------------------------------------
@@ -143,50 +86,6 @@ if(NOT GIT_PROTOCOL)
   set_property(CACHE GIT_PROTOCOL PROPERTY STRINGS "git://" "http://" "https://")
 endif()
 
-if(GIT_PROTOCOL MATCHES "http://")
-  execute_process(
-    COMMAND ${GIT_EXECUTABLE} config --global url.http://github.com/ajdawson/eof2.git.insteadof git://github.com/ajdawson/eof2.git
-    WORKING_DIRECTORY ${cdat_SOURCE_DIR}
-  )
-  execute_process(
-    COMMAND ${GIT_EXECUTABLE} config --global url.http://github.com/ajdawson/eofs.git.insteadof git://github.com/ajdawson/eofs.git
-    WORKING_DIRECTORY ${cdat_SOURCE_DIR}
- )
-  execute_process(
-    COMMAND ${GIT_EXECUTABLE} config --global --unset url.git://uv-cdat.llnl.gov/windfield.git.insteadof http://uv-cdat.llnl.gov/git/windfield.git
-    WORKING_DIRECTORY ${cdat_SOURCE_DIR}
-  )
-  execute_process(
-      COMMAND ${GIT_EXECUTABLE} config --global url.http://github.com/UV-CDAT/scimake.git.insteadof git://github.com/UV-CDAT/scimake.git
-    WORKING_DIRECTORY ${cdat_SOURCE_DIR}
-    )
-  execute_process(
-    COMMAND ${GIT_EXECUTABLE} config --global url.http://github.com/ajdawson/windspharm.git.insteadof git://github.com/ajdawson/windspharm.git
-    WORKING_DIRECTORY ${cdat_SOURCE_DIR}
- )
-else()
-  execute_process(
-    COMMAND ${GIT_EXECUTABLE} config --global --unset url.http://github.com/ajdawson/eof2.git.insteadof git://github.com/ajdawson/eof2.git
-    WORKING_DIRECTORY ${cdat_SOURCE_DIR}
-  )
-  execute_process(
-    COMMAND ${GIT_EXECUTABLE} config --global --unset url.http://github.com/ajdawson/eofs.git.insteadof git://github.com/ajdawson/eofs.git
-    WORKING_DIRECTORY ${cdat_SOURCE_DIR}
- )
-  execute_process(
-    COMMAND ${GIT_EXECUTABLE} config --global url.git://uv-cdat.llnl.gov/windfield.git.insteadof http://uv-cdat.llnl.gov/git/windfield.git
-    WORKING_DIRECTORY ${cdat_SOURCE_DIR}
-  )
-  execute_process(
-      COMMAND ${GIT_EXECUTABLE} config --global --unset url.http://github.com/UV-CDAT/scimake.git.insteadof git://github.com/UV-CDAT/scimake.git
-    WORKING_DIRECTORY ${cdat_SOURCE_DIR}
-    )
-  execute_process(
-    COMMAND ${GIT_EXECUTABLE} config --global --unset url.http://github.com/ajdawson/windspharm.git.insteadof git://github.com/ajdawson/windspharm.git
-    WORKING_DIRECTORY ${cdat_SOURCE_DIR}
- )
-endif()
-
 # Checkout the baseline repository.
 #=============================================================================
 if(BUILD_TESTING)
@@ -231,7 +130,7 @@ set(PARTS_BUILT_INFO "${cdat_BINARY_DIR}/build_info.txt" CACHE STRING "File wher
 # files in order to move them (somehow) to the OFFLINE machine where the build will happen
 # OFF: the machine has no internet access; all files are supposed to be here, pre-downloaded
 
-option(OFFLINE_BUILD "Is there internet access, are we preping for it?" OFF)
+# option(OFFLINE_BUILD "Is there internet access, are we prepping for it?" OFF)
 option(CDAT_BUILD_PARALLEL "Build parallel components of CDAT" OFF)
 
 # OSMesa/VTK aren't playing nicely on macs. Disabling for now.
@@ -242,85 +141,6 @@ cmake_dependent_option(CDAT_BUILD_OFFSCREEN "Use OSMesa for offscreen rendering.
 # Option to enable vtkweb for cdatweb
 option(CDAT_BUILD_WEB "Build in Web support (VTKWeb, etc.)" OFF)
 
-# Option to enable CMOR
-option(CDAT_BUILD_CMOR "Build CMOR" ON)
-
-# Option to choose between easy_install and pip (crunchy ssl/man in the middle prevents us to use pip here...
-set(EGG_INSTALLER "PIP" CACHE STRING "Which package installer to use")
-set_property(CACHE EGG_INSTALLER PROPERTY STRINGS "PIP" "EASY_INSTALL")
-set(PIP_CERTIFICATE "" CACHE STRING "Certificate to use for PIP (LLNL issue really)")
-
-# Options for various types of builds
-option(CDAT_USE_SYSTEM_PYTHON "Use system Python" OFF)
-
-# Default state
-set(CDAT_BUILD_LEAN OFF)
-set(CDAT_BUILD_ALL OFF)
-
-# Some more options
-option(CDAT_BUILD_GUI "Builds GUI-based dependencies (Vistrails, ParaView, VisIt, R, etc.) " ON)
-option(CDAT_BUILD_GRAPHICS "Build graphics-based dependencies (vcs, pyqt, Vistrails, ParaView, VisIt, R, etc.) " ON)
-option(CDAT_BUILD_ESGF "Alias for CDAT_BUILD_LEAN" OFF)
-option(CDAT_BUILD_UVCMETRICSPKG "Builds uvcmetrics package " ON)
-option(CDAT_BUILD_PARAVIEW "Build ParaView rather than just VTK" OFF)
-option(CDAT_DOWNLOAD_UVCMETRICS_TESTDATA "Download test data uvcmetrics package " ON)
-
-# If ESGF option is on then our build mode is LEAN.
-if (CDAT_BUILD_ESGF)
-  if( (DEFINED CDAT_BUILD_MODE) AND (NOT "${CDAT_BUILD_MODE}" STREQUAL "LEAN") )
-    message(WARNING "[INFO] CDAT_BUILD_ESGF enabled, forcing CDAT_BUILD_MODE to LEAN")
-  endif()
-  set(CDAT_BUILD_MODE "LEAN" CACHE STRING "Build mode for CDAT <ALL, LEAN, DEFAULT>" FORCE)
-  set(CDAT_DOWNLOAD_SAMPLE_DATA OFF)
-endif()
-set(CDAT_BUILD_MODE "DEFAULT" CACHE STRING "Build mode for CDAT <ALL, LEAN, DEFAULT>")
-set_property(CACHE CDAT_BUILD_MODE PROPERTY STRINGS "DEFAULT" "ALL" "LEAN")
-message([INFO] BUILD MODE: ${CDAT_BUILD_MODE})
-
-# Set the state of LEAN all based on the MODE
-if (CDAT_BUILD_MODE STREQUAL "LEAN")
-  set(CDAT_BUILD_LEAN ON)
-  set(CDAT_BUILD_ALL OFF)
-elseif (CDAT_BUILD_MODE STREQUAL "ALL")
-  set(CDAT_BUILD_LEAN OFF)
-  set(CDAT_BUILD_ALL ON)
-elseif (CDAT_BUILD_MODE STREQUAL "DEFAULT")
-  set(CDAT_BUILD_LEAN OFF)
-  set(CDAT_BUILD_ALL OFF)
-else()
-  message(FATAL_ERROR "[ERROR] Unknown CDAT_BUILD_MODE \"${CDAT_BUILD_MODE}\" VALID MODES ARE \"DEFAULT\" \"ALL\" \"LEAN\"")
-endif()
-
-# First of all if LEAN then turn OFF GRAPHICS and PARALLEL
-if (CDAT_BUILD_LEAN)
-  set_property(CACHE CDAT_BUILD_GRAPHICS PROPERTY VALUE OFF)
-  set_property(CACHE CDAT_BUILD_PARALLEL PROPERTY VALUE OFF)
-  set_property(CACHE CDAT_BUILD_UVCMETRICSPKG PROPERTY VALUE OFF)
-  set(CDMS_ONLY --enable-cdms-only)
-else()
-  set(CDMS_ONLY "")
-endif()
-
-# If ALL is enabled then turn ON GUI, GRAPHICS, and PARALLEL
-if (CDAT_BUILD_ALL)
-  set_property(CACHE CDAT_BUILD_GUI PROPERTY VALUE ON)
-  set_property(CACHE CDAT_BUILD_GRAPHICS PROPERTY VALUE ON)
-  set_property(CACHE CDAT_BUILD_PARALLEL PROPERTY VALUE ON)
-  set_property(CACHE CDAT_BUILD_ESGF PROPERTY VALUE OFF)
-  set_property(CACHE CDAT_BUILD_UVCMETRICSPKG PROPERTY VALUE ON)
-  set(CDMS_ONLY "")
-endif()
-
-# If no graphics then no gui as well
-if (NOT CDAT_BUILD_GRAPHICS)
-  set_property(CACHE CDAT_BUILD_GUI PROPERTY VALUE OFF)
-endif()
-
-# Don't build GUI if this is an offscreen-only build:
-if(CDAT_BUILD_OFFSCREEN AND CDAT_BUILD_GUI)
-  message("[INFO] Turning off CDAT_BUILD_GUI; incompatible with CDAT_BUILD_OFFSCREEN.")
-  set_property(CACHE CDAT_BUILD_GUI PROPERTY VALUE OFF)
-endif()
 
 set(ep_prefix ${cdat_BINARY_DIR}/build/prefix)
 set_property(DIRECTORY PROPERTY ep_log_dir ${cdat_BINARY_DIR}/logs)
@@ -418,228 +238,16 @@ endif()
 # when left to create them.
 #=============================================================================
 set(CDAT_PACKAGE_CACHE_DIR
-#  ${CMAKE_CURRENT_BINARY_DIR}/../cdat_dependencies"
   "${CMAKE_CURRENT_BINARY_DIR}"
   CACHE PATH
   "Directory where source tar balls of external dependencies are kept"
 )
 
 include(ExternalProject)
-
-file(MAKE_DIRECTORY ${cdat_EXTERNALS})
-file(MAKE_DIRECTORY ${cdat_EXTERNALS}/lib)
-file(MAKE_DIRECTORY ${cdat_EXTERNALS}/bin)
-file(MAKE_DIRECTORY ${cdat_EXTERNALS}/include)
-file(MAKE_DIRECTORY ${cdat_BINARY_DIR}/logs)
-file(MAKE_DIRECTORY ${cdat_BINARY_DIR}/build)
-file(MAKE_DIRECTORY ${cdat_BINARY_DIR}/sources)
-
-# Configure cdat command files
-#=============================================================================
-set(cdat_configure_command ${CMAKE_COMMAND} -P ${cdat_CMAKE_BINARY_DIR}/cdat_configure_step.cmake)
-set(cdat_make_command ${CMAKE_COMMAND} -P ${cdat_CMAKE_BINARY_DIR}/cdat_make_step.cmake)
-set(cdat_install_command ${CMAKE_COMMAND} -P ${cdat_CMAKE_BINARY_DIR}/cdat_install_step.cmake)
-
-# Include essential packages
-#=============================================================================
-set(external_packages)
-set(found_system_include_dirs)
-set(found_system_libraries)
-
-include(python_pkg)
-if (APPLE)
-  set(SB_EXTERNALS_DIR "${CMAKE_INSTALL_PREFIX}/Externals")
-  set(SB_DIR "${CMAKE_INSTALL_PREFIX}/Library/Frameworks/Python.framework/Versions/${PYVER}")
-else()
-  set(SB_DIR "${CMAKE_INSTALL_PREFIX}")
-  # Helper variables to locate programs and libraries
-  set(SB_EXTERNALS_DIR "${CMAKE_INSTALL_PREFIX}/Externals")
-endif()
-
-set(SB_LIB_DIR "${SB_DIR}/lib")
-set(SB_BIN_DIR "${SB_DIR}/bin")
-
-include(basemap_pkg)
 include(cdat_pkg)
-include(clapack_pkg)
-#include(curl_pkg)
-include(configobj_pkg)
-include(cycler_pkg)
-include(cython_pkg)
-include(data_pkg)
-include(esmf_pkg)
-include(x264_pkg)
-include(ffmpeg_pkg)
-include(pyflakes_pkg)
-include(pep8_pkg)
-include(mccabe_pkg)
-include(flake8_pkg)
-include(g2clib_pkg)
-include(proj4_pkg)
-include(ocgis_pkg)
-include(cligj_pkg)
-include(click_pkg)
-include(fiona_pkg)
-include(pynetcdf4_pkg)
-include(gdal_pkg)
-include(geos_pkg)
-include(gsw_pkg)
-include(gui_support_pkg)
-include(h5py_pkg)
-include(hdf5_pkg)
-include(zmq_pkg)
-include(pyzmq_pkg)
-include(tornado_pkg)
-include(ipython_pkg)
-include(jasper_pkg)
-include(lapack_pkg)
-include(lepl_pkg)
-include(libcf_pkg)
-include(lats_pkg)
-include(libdrs_pkg)
-include(libdrsfortran_pkg)
-include(ezget_pkg)
-include(cd77_pkg)
-include(matplotlib_pkg)
-include(six_pkg)
-include(openssl_pkg)
-include(cryptography_pkg)
-include(enum34_pkg)
-include(idna_pkg)
-include(pyasn1_pkg)
-include(ipaddress_pkg)
-include(cffi_pkg)
-include(ffi_pkg)
-include(dateutils_pkg)
-include(pyparsing_pkg)
-include(pycparser_pkg)
-include(md5_pkg)
-include(mpi4py_pkg)
-include(pyopenssl_pkg)
-include(setuptools_pkg)
-include(myproxyclient_pkg)
-include(netcdf_pkg)
-include(numexpr_pkg)
-include(numpy_pkg)
-include(mpi_pkg)
-include(osmesa_pkg)
-include(seawater_pkg)
-include(vacumm_pkg)
-if (CDAT_BUILD_PARAVIEW)
-  include(paraview_pkg)
-else()
-  include(vtk_pkg)
-endif()
-include(pkgconfig_pkg)
-include(libcdms_pkg)
-include(sampledata_pkg)
-include(pyspharm_pkg)
-include(pytables_pkg)
-include(readline_pkg)
-include(r_pkg)
-include(rpy2_pkg)
-include(singledispatch_pkg)
-include(scikits_pkg)
-include(scipy_pkg)
-## Part of setuptools no need to extra build it
-## include(distribute_pkg)
-if (NOT CDAT_USE_SYSTEM_PYTHON)
-  include(pip_pkg)
-endif()
-include(shapely_pkg)
-include(pygments_pkg)
-include(markupsafe_pkg)
-include(jinja2_pkg)
-include(docutils_pkg)
-include(sphinx_pkg)
-include(freetype_pkg)
-include(coverage_pkg)
-## C. Doutriaux: We need to replace the following with a findPackage at some point
-if (APPLE)
-else()
-  include(jpeg_pkg)
-  include(pixman_pkg)
-  include(fontconfig_pkg)
-  include(curses_pkg)
-  #include(tiff_pkg)
-  include(netcdfplus_pkg)
-endif()
-#include(geotiff_pkg)
-include(cmor_pkg)
-include(udunits2_pkg)
-include(uuid_pkg)
-# IF we build the UVCDAT Metrics package
-if (CDAT_BUILD_UVCMETRICSPKG)
-  if (CDAT_DOWNLOAD_UVCMETRICS_TESTDATA)
-    set(UVCMETRICS_TEST_DATA_DIRECTORY ${CMAKE_INSTALL_PREFIX}/share/uvcmetrics/test_data CACHE PATH "DIR FOR UVCMETRICS TEST DATA" )
-  endif()
-  include(uvcmetrics_pkg)
-endif()
-include(vistrails_pkg)
-#include(yasm_pkg)
-include(pylibxml2_pkg)
-include(cdatlogger_pkg)
-include(pyclimate_pkg)
-include(scientificpython_pkg)
-include(windspharm_pkg)
-include(eof2_pkg)
-include(eofs_pkg)
-include(windfield_pkg)
-if (CDAT_BUILD_ESGF)
-    include(lxml_pkg)
-endif()
-
-if (CDAT_BUILD_GUI)
-  include(qt4_pkg)
-  if (NOT CDAT_USE_SYSTEM_PYTHON)
-    include(sip_pkg)
-    include(pyqt_pkg)
-  endif()
-  include(spyder_pkg)
-endif()
-
-# Configure custom configure/build/install step files
-#=============================================================================
-configure_file(${cdat_CMAKE_SOURCE_DIR}/cdat_modules_extra/cdat_common_environment.cmake.in
-    ${cdat_CMAKE_BINARY_DIR}/cdat_common_environment.cmake
-    @ONLY
-)
-
-configure_file(${cdat_CMAKE_SOURCE_DIR}/cdat_modules_extra/cdat_configure_step.cmake.in
-    ${cdat_CMAKE_BINARY_DIR}/cdat_configure_step.cmake
-    @ONLY
-)
-
-configure_file(${cdat_CMAKE_SOURCE_DIR}/cdat_modules_extra/cdat_make_step.cmake.in
-  ${cdat_CMAKE_BINARY_DIR}/cdat_make_step.cmake
-  @ONLY
-)
-
-configure_file(${cdat_CMAKE_SOURCE_DIR}/cdat_modules_extra/cdat_cmake_make_step.cmake.in
-  ${cdat_CMAKE_BINARY_DIR}/cdat_cmake_make_step.cmake
-  @ONLY
-)
-
-configure_file(${cdat_CMAKE_SOURCE_DIR}/cdat_modules_extra/cdat_install_step.cmake.in
-  ${cdat_CMAKE_BINARY_DIR}/cdat_install_step.cmake
-  @ONLY
-)
-
-configure_file(${cdat_CMAKE_SOURCE_DIR}/cdat_modules_extra/cleanenv_configure_step.cmake.in
-    ${cdat_CMAKE_BINARY_DIR}/cleanenv_configure_step.cmake
-    @ONLY
-)
-
-configure_file(${cdat_CMAKE_SOURCE_DIR}/cdat_modules_extra/cdatmpi_configure_step.cmake.in
-    ${cdat_CMAKE_BINARY_DIR}/cdatmpi_configure_step.cmake
-    @ONLY
-)
-
-configure_file(${cdat_CMAKE_SOURCE_DIR}/fix_install_name.py.in
-    ${cdat_CMAKE_BINARY_DIR}/fix_install_name.py
-    @ONLY
-)
-
+# CONDA Options
+set(CONDA_ENVIRONMENT_NAME ${cdat_VERSION} CACHE STRING "Name of conda environment we want to build CDAT in")
+set(CONDA_CHANNEL_UVCDAT uvcdat CACHE STRING "Conda channel(s) to use; separate multiple channels with '-c', e.g. uvcdat/label/nightly -c uvcdat")
 # Now sort and include external packages
 #=============================================================================
 include(TopologicalSort)
@@ -670,368 +278,23 @@ foreach(package ${external_packages})
     include("${lc_package}_external")
   endif()
 endforeach()
-file(WRITE ${PARTS_BUILT_INFO} ${packages_info})
-
-# Construct Include and Link variables
-#=============================================================================
-if(found_system_include_dirs)
-  list(REMOVE_DUPLICATES found_system_include_dirs)
-  list(REMOVE_ITEM found_system_include_dirs ${CMAKE_CXX_IMPLICIT_INCLUDE_DIRECTORIES} ${CMAKE_C_IMPLICIT_INCLUDE_DIRECTORIES})
-  set(cdat_external_include_directories)
-  foreach(include_dir ${found_system_include_dirs})
-    set(cdat_external_include_directories "-I${include_dir} ${cdat_external_include_directories}")
-  endforeach()
-endif()
-message("[INFO] CDAT external include directories: ${cdat_external_include_directories}")
-
-message("[INFO] System libraries: ${found_system_libraries}")
-if(found_system_libraries)
-  list(REMOVE_DUPLICATES found_system_libraries)
-  list(REMOVE_ITEM found_system_libraries ${CMAKE_PLATFORM_IMPLICIT_LINK_DIRECTORIES})
-  set(cdat_external_link_directories)
-  foreach(library_dir ${found_system_libraries})
-    set(cdat_external_link_directories "-L${library_dir} ${cdat_external_link_directories}")
-  endforeach()
-endif()
-message("[INFO] CDAT external link directories: ${cdat_external_link_directories}")
-
-# Configure remaining files
-#=============================================================================
-
-# set candidate paths for setup_runtime scripts
-# will be added to environment variables in reverse order
-set(SETUP_LIBRARY_PATHS
-  "Externals/lib/paraview-${PARAVIEW_MAJOR}.${PARAVIEW_MINOR} "
-  "Externals/lib/R/lib "
-  "Externals/lib "
-  "Externals/proj4/lib "
-  "Externals/lib64 "
-  "lib "
-)
-string(REPLACE ";" " " SETUP_LIBRARY_PATHS ${SETUP_LIBRARY_PATHS})
-set(SETUP_EXECUTABLE_PATHS
-  "Externals/paraview.app/Contents/bin "
-  "Library/Frameworks/Python.framework/Versions/${PYVER}/bin "
-  "Externals/bin "
-  "bin "
-)
-string(REPLACE ";" " " SETUP_EXECUTABLE_PATHS ${SETUP_EXECUTABLE_PATHS})
-set(SETUP_PYTHON_PATHS
-  "Externals/paraview.app/Contents/Python "
-  "Externals/lib/python${PYVER}/site-packages "
-  "Externals/lib/paraview-${PARAVIEW_MAJOR}.${PARAVIEW_MINOR}/site-packages "
-  "lib/python${PYVER}/site-packages "
-)
-string(REPLACE ";" " " SETUP_PYTHON_PATHS ${SETUP_PYTHON_PATHS})
-include(GetGitRevisionDescription)
-git_describe(UVCDAT_PROMPT_STRING)
-configure_file(${cdat_CMAKE_SOURCE_DIR}/cdat_modules_extra/install.py.in
-    ${cdat_SOURCE_DIR}/installation/install.py
-  @ONLY
-)
-
-configure_file(${cdat_CMAKE_SOURCE_DIR}/cdat_modules_extra/checked_get.sh.in
-  ${cdat_BINARY_DIR}/checked_get.sh
-  @ONLY
-)
-
-configure_file(${cdat_CMAKE_SOURCE_DIR}/cdat_modules_extra/predownload.py.in
-  ${cdat_BINARY_DIR}/predownload.py
-  @ONLY
-)
-
-configure_file(${cdat_CMAKE_SOURCE_DIR}/cdat_modules_extra/setup_runtime.sh.in
-  ${CMAKE_INSTALL_PREFIX}/bin/setup_runtime.sh
-  @ONLY
-)
-
-configure_file(${cdat_CMAKE_SOURCE_DIR}/cdat_modules_extra/reset_runtime.sh.in
-  ${CMAKE_INSTALL_PREFIX}/bin/reset_runtime.sh
-  @ONLY
-)
-
-configure_file(${cdat_CMAKE_SOURCE_DIR}/cdat_modules_extra/preofflinebuild.sh.in
-  ${cdat_BINARY_DIR}/preofflinebuild.sh
-  @ONLY
-)
-
-configure_file(${cdat_CMAKE_SOURCE_DIR}/cdat_modules_extra/setup_runtime.csh.in
-  ${CMAKE_INSTALL_PREFIX}/bin/setup_runtime.csh
-  @ONLY
-)
-
-configure_file(${cdat_CMAKE_SOURCE_DIR}/cdat_modules_extra/reset_runtime.csh.in
-  ${CMAKE_INSTALL_PREFIX}/bin/reset_runtime.csh
-  @ONLY
-)
-
-configure_file(${cdat_CMAKE_SOURCE_DIR}/cdat_modules_extra/uvcdat.in
-  ${CMAKE_INSTALL_PREFIX}/bin/uvcdat
-  @ONLY
-)
-
-configure_file(${cdat_CMAKE_SOURCE_DIR}/cdat_modules_extra/cdat.in
-  ${CMAKE_INSTALL_PREFIX}/bin/cdat
-  @ONLY
-)
 
 configure_file(${cdat_CMAKE_SOURCE_DIR}/cdat_modules_extra/runtest.in
-  ${CMAKE_INSTALL_PREFIX}/bin/runtest
-  @ONLY
-)
-
-
-if (BUILD_TESTING)
-  configure_file(${cdat_CMAKE_SOURCE_DIR}/cdat_modules_extra/runpytest.in
-    ${CMAKE_INSTALL_PREFIX}/bin/runpytest
+    ${cdat_CMAKE_BINARY_DIR}/runtest
     @ONLY
-  )
-  add_subdirectory(testing)
-endif()
-
-# Where to install the wrapper scripts
-set(WRAPPER_INSTALL_LOCATION ${CMAKE_INSTALL_PREFIX}/wrappers
-    CACHE PATH
-    "Install wrapper scripts 'cdat', 'uvcdat' and 'loadcdat' in that directory")
-
-add_custom_command(
-        OUTPUT ${WRAPPER_INSTALL_LOCATION}/loadcdat
-        COMMAND ${CMAKE_COMMAND} -E copy
-        ${CMAKE_INSTALL_PREFIX}/bin/setup_runtime.sh
-        ${WRAPPER_INSTALL_LOCATION}/loadcdat)
-add_custom_command(
-        OUTPUT ${WRAPPER_INSTALL_LOCATION}/loadcdat.csh
-        COMMAND ${CMAKE_COMMAND} -E copy
-        ${CMAKE_INSTALL_PREFIX}/bin/setup_runtime.csh
-        ${WRAPPER_INSTALL_LOCATION}/loadcdat.csh)
-add_custom_command(
-        OUTPUT ${WRAPPER_INSTALL_LOCATION}/uvcdat
-        COMMAND ${CMAKE_COMMAND} -E copy
-        ${CMAKE_INSTALL_PREFIX}/bin/uvcdat
-        ${WRAPPER_INSTALL_LOCATION}/uvcdat)
-add_custom_command(
-        OUTPUT ${WRAPPER_INSTALL_LOCATION}/cdat
-        COMMAND ${CMAKE_COMMAND} -E copy
-        ${CMAKE_INSTALL_PREFIX}/bin/cdat
-        ${WRAPPER_INSTALL_LOCATION}/cdat)
-
-add_custom_target(wrappers ALL DEPENDS
-                  ${WRAPPER_INSTALL_LOCATION}/loadcdat
-                  ${WRAPPER_INSTALL_LOCATION}/loadcdat.csh
-                  ${WRAPPER_INSTALL_LOCATION}/uvcdat
-                  ${WRAPPER_INSTALL_LOCATION}/cdat)
-
-# Package UV-CDAT with CPACK
-include(InstallRequiredSystemLibraries)
-
-set(CPACK_PACKAGE_DESCRIPTION_SUMMARY "UVCDAT")
-set(CPACK_PACKAGE_VENDOR "UVCDAT")
-set(CPACK_PACKAGE_NAME "UVCDAT")
-set(CPACK_PACKAGE_VERSION_MAJOR "2")
-set(CPACK_PACKAGE_VERSION_MINOR "3")
-set(CPACK_PACKAGE_VERSION_PATCH "0")
-set(CPACK_PACKAGE_VERSION ${CPACK_PACKAGE_VERSION_MAJOR}.${CPACK_PACKAGE_VERSION_MINOR}.${CPACK_PACKAGE_VERSION_PATCH})
-set(CPACK_PACKAGE_DESCRIPTION_FILE "${CMAKE_CURRENT_SOURCE_DIR}/docs/README.txt")
-set(CPACK_RESOURCE_FILE_LICENSE "${CMAKE_CURRENT_SOURCE_DIR}/docs/Legal.txt")
-set(CPACK_DEBIAN_PACKAGE_MAINTAINER "Aashish Chaudhary") #required
-set(CPACK_PACKAGING_INSTALL_PREFIX ${CMAKE_INSTALL_PREFIX})
-set(CPACK_RPM_PACKAGE_PROVIDES /usr/local/uvcdat/bin/python /usr/local/uvcdat/bin/python2.7)
-set(CPACK_DESTINATION_BIN_PREFIX "bin")
-
-if (APPLE)
-  set(SB_EXTERNALS_DIR "Externals")
-  set(SB_LIB_DIR "Library/Frameworks/Python.framework/Versions/2.7/lib")
-  set(CPACK_GENERATOR DragNDrop)
-  set(CPACK_DESTINATION_PREFIX "\${CMAKE_INSTALL_PREFIX}/UVCDAT.app/Contents")
-  set(CPACK_DESTINATION_BIN_PREFIX "${CPACK_DESTINATION_PREFIX}/MacOS")
-endif()
-
-include(CPack)
-
-
-install(CODE "
-  set(SB_EXTERNALS_DIR ${SB_EXTERNALS_DIR})
-  set(SB_LIB_DIR ${SB_LIB_DIR})
-  set(PYVER ${PYVER})
-  set(PARAVIEW_MAJOR ${PARAVIEW_MAJOR})
-  set(PARAVIEW_MINOR ${PARAVIEW_MINOR})
-  set(VISIT_VERSION ${VISIT_VERSION})
-  set(CDAT_BUILD_PARAVIEW ${CDAT_BUILD_PARAVIEW})
-  set(SETUP_EXECUTABLE_PATHS \"${SETUP_EXECUTABLE_PATHS}\")
-  set(SETUP_PYTHON_PATHS \"${SETUP_PYTHON_PATHS}\")
-  set(SETUP_LIBRARY_PATHS \"${SETUP_LIBRARY_PATHS}\")
-
-  file(GLOB_RECURSE programs \"${CMAKE_INSTALL_PREFIX}/bin/*\")
-  file(GLOB programs_images \"${CMAKE_INSTALL_PREFIX}/bin/images/*\")
-  file(GLOB programs_tutorials \"${CMAKE_INSTALL_PREFIX}/bin/tutorials/*\")
-
-  if (NOT \"\${programs_images}\" STREQUAL \"\" OR NOT \"\${programs_tutorials}\" STREQUAL \"\")
-    list(REMOVE_ITEM programs \${programs_images} \${programs_tutorials})
-  endif()
-
-  set (resolved_programs \"\")
-  foreach (program \${programs})
-    get_filename_component(res_program \"\${program}\" REALPATH)
-    set (regex_match \"\")
-    # Do not install uuid as its dependencies are not resolved when using
-    # RPMBuild
-    file (STRINGS \"\${res_program}\" regex_match REGEX \"uuid\")
-    if (\"\${regex_match}\" STREQUAL \"\")
-      file (STRINGS \"\${res_program}\" regex_match REGEX \"#!${CMAKE_INSTALL_PREFIX}\")
-      if (\"\${regex_match}\" STREQUAL \"\")
-        list (APPEND resolved_programs \"\${res_program}\")
-      endif ()
-    endif ()
-  endforeach()
-  
-
-  file(INSTALL FILES \${resolved_programs} DESTINATION
-    \"\${CMAKE_INSTALL_PREFIX}/${CPACK_DESTINATION_BIN_PREFIX}\"
-       PERMISSIONS USE_SOURCE_PERMISSIONS
-  )
-
-  if(EXISTS \"${CMAKE_INSTALL_PREFIX}/bin/images\" AND IS_DIRECTORY \"${CMAKE_INSTALL_PREFIX}/bin/images\")
-    file(INSTALL FILES ${CMAKE_INSTALL_PREFIX}/bin/images DESTINATION
-      \"\${CMAKE_INSTALL_PREFIX}/${CPACK_DESTINATION_BIN_PREFIX}\"
-         PERMISSIONS USE_SOURCE_PERMISSIONS
-    )
-  endif()
-
-  if(EXISTS \"${CMAKE_INSTALL_PREFIX}/bin/tutorials\" AND IS_DIRECTORY \"${CMAKE_INSTALL_PREFIX}/bin/tutorials\")
-    file(INSTALL FILES ${CMAKE_INSTALL_PREFIX}/bin/tutorials DESTINATION
-      \"\${CMAKE_INSTALL_PREFIX}/${CPACK_DESTINATION_BIN_PREFIX}\"
-         PERMISSIONS USE_SOURCE_PERMISSIONS
-    )
-  endif()
-
-  if(EXISTS \"${CMAKE_INSTALL_PREFIX}/Externals\" AND IS_DIRECTORY \"${CMAKE_INSTALL_PREFIX}/Externals\")
-    file(INSTALL FILES ${CMAKE_INSTALL_PREFIX}/Externals DESTINATION
-      \"\${CMAKE_INSTALL_PREFIX}\"
-      PERMISSIONS USE_SOURCE_PERMISSIONS
-      REGEX \"uuid\" EXCLUDE
-    )
-  endif()
-
-  if(EXISTS \"${CMAKE_INSTALL_PREFIX}/include\" AND IS_DIRECTORY \"${CMAKE_INSTALL_PREFIX}/include\")
-    file(INSTALL FILES ${CMAKE_INSTALL_PREFIX}/include DESTINATION
-      \"\${CMAKE_INSTALL_PREFIX}\"
-         PERMISSIONS USE_SOURCE_PERMISSIONS
-    )
-  endif()
-
-  if(EXISTS \"${CMAKE_INSTALL_PREFIX}/lib\" AND IS_DIRECTORY \"${CMAKE_INSTALL_PREFIX}/lib\")
-    file(INSTALL FILES ${CMAKE_INSTALL_PREFIX}/lib DESTINATION
-      \"\${CMAKE_INSTALL_PREFIX}\"
-         PERMISSIONS USE_SOURCE_PERMISSIONS
-    )
-  endif()
-
-  # Patch cgi.py to look for installed python
-  if (EXISTS \"\$ENV{DESTDIR}/\${CMAKE_INSTALL_PREFIX}/lib/python2.7/cgi.py\")
-    file (READ \"\$ENV{DESTDIR}/\${CMAKE_INSTALL_PREFIX}/lib/python2.7/cgi.py\" CGI_FILE)
-    SET (SEARCH_REGEX \"\\\#! /usr/local/bin/python\")
-    SET (REPLACEMENT_TEXT \"#! /usr/bin/env python\")
-    STRING (REGEX REPLACE \"\${SEARCH_REGEX}\" \"\${REPLACEMENT_TEXT}\"
-      MODIFIED_FILE \"\${CGI_FILE}\")
-    file (WRITE \"\$ENV{DESTDIR}/\${CMAKE_INSTALL_PREFIX}/lib/python2.7/cgi.py\"
-      \"\${MODIFIED_FILE}\")
-  endif ()
-
-  if(EXISTS \"${CMAKE_INSTALL_PREFIX}/share\" AND IS_DIRECTORY \"${CMAKE_INSTALL_PREFIX}/share\")
-    file(INSTALL FILES ${CMAKE_INSTALL_PREFIX}/share DESTINATION
-      \"\${CMAKE_INSTALL_PREFIX}\"
-         PERMISSIONS USE_SOURCE_PERMISSIONS
-    )
-  endif()
-
-  if(EXISTS \"${CMAKE_INSTALL_PREFIX}/wrappers\" AND IS_DIRECTORY \"${CMAKE_INSTALL_PREFIX}/wrappers\")
-    file(INSTALL FILES ${CMAKE_INSTALL_PREFIX}/wrappers DESTINATION
-      \"\${CMAKE_INSTALL_PREFIX}\"
-         PERMISSIONS USE_SOURCE_PERMISSIONS
-    )
-  endif()
-
-  if(EXISTS \"${CMAKE_INSTALL_PREFIX}/bin/man\" AND IS_DIRECTORY \"${CMAKE_INSTALL_PREFIX}/bin/man\")
-    file(INSTALL FILES ${CMAKE_INSTALL_PREFIX}/man DESTINATION
-      \"\${CMAKE_INSTALL_PREFIX}/${CPACK_DESTINATION_BIN_PREFIX}\"
-         PERMISSIONS USE_SOURCE_PERMISSIONS
-    )
-  endif()
-
-  if(EXISTS \"${CMAKE_INSTALL_PREFIX}/vistrails\" AND IS_DIRECTORY \"${CMAKE_INSTALL_PREFIX}/vistrails\")
-    file(INSTALL FILES ${CMAKE_INSTALL_PREFIX}/vistrails DESTINATION
-      \"\${CMAKE_INSTALL_PREFIX}\"
-         PERMISSIONS USE_SOURCE_PERMISSIONS
     )
-  endif()
-
-  if(EXISTS \"${CMAKE_INSTALL_PREFIX}/Library\" AND IS_DIRECTORY \"${CMAKE_INSTALL_PREFIX}/Library\")
-    file(INSTALL FILES ${CMAKE_INSTALL_PREFIX}/Library DESTINATION
-      \"\${CMAKE_INSTALL_PREFIX}\"
-         PERMISSIONS USE_SOURCE_PERMISSIONS
-    )
-  endif()
-  
-  file(INSTALL FILES ${cdat_BINARY_DIR}/build_info.txt DESTINATION ${CMAKE_INSTALL_PREFIX}/info)
-
-  # Unset QT_LIB_DIR as we need to use the one in user's environment
-  # We need to keep in ming that we might need to build Qt on some systems
-  # (e.g. RH6) in which case this might break something
-  set(QT_LIB_DIR)
-
-  # Configure the environment setup script to point to the installation
-  # Creating a temporary file that will be installed.
-  configure_file(${cdat_CMAKE_SOURCE_DIR}/cdat_modules_extra/setup_runtime.sh.in
-    \"\$ENV{DESTDIR}/\${CMAKE_INSTALL_PREFIX}/${CPACK_DESTINATION_BIN_PREFIX}/setup_runtime.sh\"
+configure_file(${cdat_CMAKE_SOURCE_DIR}/cdat_modules_extra/install_cdat_from_conda.bash.in
+    ${cdat_CMAKE_BINARY_DIR}/install_cdat_from_conda.bash
     @ONLY
-  )
-
-  # Finally, create a symlink for python to point to point to installed python
-  if (EXISTS \"\$ENV{DESTDIR}/\${CMAKE_INSTALL_PREFIX}/${CPACK_DESTINATION_BIN_PREFIX}/python2.7\"
-      AND
-      NOT EXISTS \"\$ENV{DESTDIR}/\${CMAKE_INSTALL_PREFIX}/${CPACK_DESTINATION_BIN_PREFIX}/python\")
-    execute_process(COMMAND \${CMAKE_COMMAND} -E create_symlink
-      \"\$ENV{DESTDIR}/\${CMAKE_INSTALL_PREFIX}/${CPACK_DESTINATION_BIN_PREFIX}/python2.7\"
-      \"\$ENV{DESTDIR}/\${CMAKE_INSTALL_PREFIX}/${CPACK_DESTINATION_BIN_PREFIX}/python\"
-      )
-  endif ()
-
-  if (APPLE)
-    configure_file(${cdat_CMAKE_SOURCE_DIR}/cdat_modules_extra/uvcdat.mac.in
-      ${CPACK_DESTINATION_BIN_PREFIX}/uvcdat
-      @ONLY
     )
-    execute_process(COMMAND \${CMAKE_COMMAND} -E copy_directory ${cdat_SOURCE_DIR}/resources
-      ${CPACK_DESTINATION_PREFIX}/Resources
-      OUTPUT_VARIABLE out
-      RESULT_VARIABLE res
-      ERROR_VARIABLE err
-    )
-    if(NOT \${res} EQUAL 0)
-      message(\"Output: \${out}; Result: \${res}; Error: \${err}\")
-    endif()
 
-    execute_process(COMMAND \${CMAKE_COMMAND} -E copy ${cdat_CMAKE_SOURCE_DIR}/uvcdat.plist
-      ${CPACK_DESTINATION_PREFIX}/Info.plist
-      OUTPUT_VARIABLE out
-      RESULT_VARIABLE res
-      ERROR_VARIABLE err
+configure_file(${cdat_CMAKE_SOURCE_DIR}/cdat_modules_extra/clean_cdat_from_conda.bash.in
+    ${cdat_CMAKE_BINARY_DIR}/clean_cdat_from_conda.bash
+    @ONLY
     )
-    if(NOT \${res} EQUAL 0)
-      message(\"Output: \${out}; Result: \${res}; Error: \${err}\")
-    endif()
+add_subdirectory(testing)
 
-    execute_process(COMMAND ${PYTHON_EXECUTABLE} ${cdat_CMAKE_BINARY_DIR}/fix_install_name.py
-      ${CPACK_DESTINATION_PREFIX}
-      OUTPUT_VARIABLE out
-      RESULT_VARIABLE res
-      ERROR_VARIABLE err
+# Clean conda env
+add_custom_target(clean-conda
+    COMMAND ${cdat_CMAKE_BINARY_DIR}/clean_cdat_from_conda.bash
     )
-    if(NOT \${res} EQUAL 0)
-      message(\"Output: \${out}; Result: \${res}; Error: \${err}\")
-    endif()
-  endif()"
-
-  COMPONENT superbuild
-)
-
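
Note: the net effect of this CMakeLists.txt rewrite is that the superbuild's long list of *_pkg includes, configure-step templates, wrapper scripts, and CPack packaging give way to conda; CMake now only locates conda/activate and configures install_cdat_from_conda.bash and clean_cdat_from_conda.bash around the two new cache variables. A sketch of the kind of call such a script boils down to; the "cdat" package spec is hypothetical, since the actual package list is not part of this diff:

    import subprocess

    env_name = "2.12"     # stands in for ${CONDA_ENVIRONMENT_NAME}
    channel = "uvcdat"    # stands in for ${CONDA_CHANNEL_UVCDAT}

    subprocess.check_call(["conda", "create", "-y", "-n", env_name,
                           "-c", channel, "cdat"])   # hypothetical package spec
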
diff --git a/Changes.txt b/Changes.txt
deleted file mode 100644
index bc7cd069ea1d1dc81e1c4db906c49c7c12dbea65..0000000000000000000000000000000000000000
--- a/Changes.txt
+++ /dev/null
@@ -1,3 +0,0 @@
-[updated_packages_versions]: Added distribute, added option to choose between ip and easy_install, added option to use cert for pip
-[updated_packages_versions]: Upgraded Packages to latest version
-1.3.1
diff --git a/docs/Legal.htm b/LEGAL.htm
similarity index 100%
rename from docs/Legal.htm
rename to LEGAL.htm
diff --git a/docs/Legal.txt b/LEGAL.txt
similarity index 100%
rename from docs/Legal.txt
rename to LEGAL.txt
diff --git a/Packages/DV3D/ConfigurationFunctions.py b/Packages/DV3D/ConfigurationFunctions.py
index c3bff88c3ebe3bc6bf22e45ba2d2b9f5d1704a00..98e1947b7352e739e3fa01da1abc27c444f666aa 100644
--- a/Packages/DV3D/ConfigurationFunctions.py
+++ b/Packages/DV3D/ConfigurationFunctions.py
@@ -245,7 +245,7 @@ class ConfigManager:
         if ( self.parent <> None ):
             for parm_address in self.parent.parameters.keys():
                 basename = get_parameter_name( parm_address )
-                self.parameters[basename] = self.getParameter( basename  )
+                self.parameters[basename] = ConfigParameter(basename, parent=self.parent.getParameter(basename))
         self.initialized = False
 
     def clear( self, cell ):
@@ -264,7 +264,6 @@ class ConfigManager:
             if self.parent is None:
                 cparm = ConfigParameter( param_name, **args )
             else:
-#                print "Getting config param from parent: ", param_name
                 cparm_parent = self.parent.getParameter( param_name, cell=self.cell_coordinates )
                 cparm = ConfigParameter( param_name, parent=cparm_parent, **args )
             self.addParam( param_name, cparm )
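
Note: the DV3D fix changes what a child ConfigManager stores: a fresh ConfigParameter linked to the parent's parameter, rather than the parent's object itself, so per-cell writes can no longer clobber the shared value. A toy model of the parent-chained lookup (not the DV3D API):

    class Param:
        def __init__(self, name, value=None, parent=None):
            self.name, self.value, self.parent = name, value, parent

        def get(self):
            # Local value wins; otherwise defer to the parent chain.
            if self.value is not None:
                return self.value
            return self.parent.get() if self.parent else None

    shared = Param("colormap", value="viridis")
    cell = Param("colormap", parent=shared)     # reads fall through...
    assert cell.get() == "viridis"
    cell.value = "jet"                          # ...but writes stay local
    assert shared.get() == "viridis"
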
diff --git a/Packages/Thermo/Lib/thermo.py b/Packages/Thermo/Lib/thermo.py
index c2d5ccc58d2fc7d52eee5a8b9beeae15ec91e2c5..9f8cc6a93a1058c6390573240003673a44453f72 100644
--- a/Packages/Thermo/Lib/thermo.py
+++ b/Packages/Thermo/Lib/thermo.py
@@ -4,9 +4,9 @@ import cdms2
 import genutil
 import unidata
 import vcs
+import numpy
 from vcs import VCS_validation_functions
 thermo_objects = []
-import numpy
 
 
 def Es(T, method=None):
diff --git a/Packages/cdms2/Lib/avariable.py b/Packages/cdms2/Lib/avariable.py
index dd2a2792dbeacf7775403a4b7c96d2db295e66a6..f15d7a0691bad18d12128942b12b019aa124262e 100644
--- a/Packages/cdms2/Lib/avariable.py
+++ b/Packages/cdms2/Lib/avariable.py
@@ -363,6 +363,16 @@ class AbstractVariable(CdmsObj, Slab):
             if axisMatches(self.getAxis(i), axis_spec):
                 return i
         return -1
+    
+    def hasCellData(self):
+        '''
+        If any of the variable's axes has explicit bounds, we have cell data;
+        otherwise we have point data.
+        '''
+        for axis in self.getAxisList():
+            if (axis.getExplicitBounds() is not None):
+                return True
+        return False
 
     def getAxisListIndex (self, axes=None, omit=None, order=None):
         """Return a list of indices of axis objects;
@@ -511,7 +521,6 @@ class AbstractVariable(CdmsObj, Slab):
                 
         return result
 
-
     # Get an order string, such as "tzyx"
     def getOrder(self, ids=0):
         """getOrder(ids=0) returns the order string, such as tzyx.
@@ -915,7 +924,9 @@ class AbstractVariable(CdmsObj, Slab):
                 else:
                     delta_beg_wrap_dimvalue = ncyclesrev*cycle
 
-                axis.setBounds(axis.getBounds() - delta_beg_wrap_dimvalue)
+                isGeneric = [False]
+                b = axis.getBounds(isGeneric) - delta_beg_wrap_dimvalue
+                axis.setBounds(b, isGeneric=isGeneric[0])
                 
                 axis[:]= (axis[:] - delta_beg_wrap_dimvalue).astype(axis.typecode())
 
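
Note: two conventions run through the cdms2 changes here: getBounds grows an isGeneric "out parameter" (a one-element list, since Python 2 has no clean by-reference boolean) reporting whether bounds were generated or read from file, and explicit bounds on any axis mark a variable as cell rather than point data. A toy rendering of both, not the cdms2 API:

    def get_bounds(axis, is_generic=None):
        explicit = axis.get("explicit_bounds")
        if explicit is not None:
            if is_generic is not None:
                is_generic[0] = False       # bounds came from the file
            return explicit
        if is_generic is not None:
            is_generic[0] = True            # bounds were generated
        return axis.get("generated_bounds")

    def has_cell_data(axes):
        return any(a.get("explicit_bounds") is not None for a in axes)

    axes = [{"explicit_bounds": None, "generated_bounds": [[0.0, 1.0]]}]
    flag = [False]
    get_bounds(axes[0], flag)
    assert flag[0] and not has_cell_data(axes)  # generated bounds -> point data
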
diff --git a/Packages/cdms2/Lib/axis.py b/Packages/cdms2/Lib/axis.py
index df27788541dc802083729638ad60af2c313ec576..209fbda429930b0252175d9a7136e19b019c6d99 100644
--- a/Packages/cdms2/Lib/axis.py
+++ b/Packages/cdms2/Lib/axis.py
@@ -105,8 +105,8 @@ def getAutoBounds():
     return _autobounds
 
 # Create a transient axis
-def createAxis(data, bounds=None, id=None, copy=0):
-    return TransientAxis(data, bounds, id, copy=copy)
+def createAxis(data, bounds=None, id=None, copy=0, genericBounds=False):
+    return TransientAxis(data, bounds=bounds, id=id, copy=copy, genericBounds=genericBounds)
 
 # Generate a Gaussian latitude axis, north-to-south
 def createGaussianAxis(nlat):
@@ -959,13 +959,41 @@ class AbstractAxis(CdmsObj):
     def isLinear(self):
         raise CDMSError, MethodNotImplemented
 
-    def getBounds(self):
+    def getBounds(self, isGeneric=None):
+        '''
+        isGeneric is an optional one-element list: on return its element is
+        set to True if the bounds were generated and False if they were read
+        from the file.
+        '''
         raise CDMSError, MethodNotImplemented
 
-    # Return None if not explicitly defined
     def getExplicitBounds(self):
+        '''
+        Return None if bounds are not explicitly defined.
+        This is a way to determine whether attributes are defined at cell
+        or at point level: if this function returns None, attributes are
+        defined at points; otherwise they are defined at cells.
+        '''
         raise CDMSError, MethodNotImplemented
 
+    def getBoundsForDualGrid(self, dualGrid):
+        '''
+        dualGrid switches the dataset from its current type to the dual:
+        a point dataset becomes a cell dataset and vice versa.
+        '''
+        explicitBounds = self.getExplicitBounds()
+        if (explicitBounds is None):
+            # point data
+            if (dualGrid):
+                return self.getBounds()
+            else:
+                return None
+        else:
+            # cell data
+            if (dualGrid):
+                return None
+            else:
+                return explicitBounds
+
     def setBounds(self, bounds):
         raise CDMSError, MethodNotImplemented
 
@@ -1279,7 +1307,8 @@ class AbstractAxis(CdmsObj):
         The stride k can be positive or negative. Wraparound is
         supported for longitude dimensions or those with a modulus attribute.
         """
-        fullBounds = self.getBounds()
+        isGeneric = [False]
+        fullBounds = self.getBounds(isGeneric)
         _debug=0
         _debugprefix="SS__XX subaxis "
         
@@ -1359,7 +1388,7 @@ class AbstractAxis(CdmsObj):
             else:
                 bounds = None
         
-        newaxis = TransientAxis(data, bounds, id=self.id, copy=1)
+        newaxis = TransientAxis(data, bounds, id=self.id, copy=1, genericBounds=isGeneric[0])
 
         if self.isLatitude(): newaxis.designateLatitude()
         if self.isLongitude(): newaxis.designateLongitude()
@@ -1440,17 +1469,18 @@ class AbstractAxis(CdmsObj):
         """clone (self, copyData=1)
         Return a copy of self as a transient axis.
         If copyData is 1, make a separate copy of the data."""
-        b = self.getBounds()
+        isGeneric = [False]
+        b = self.getBounds(isGeneric)
         if copyData==1:
             mycopy = createAxis(copy.copy(self[:]))
         else:
             mycopy = createAxis(self[:])
         mycopy.id = self.id
         try:
-            mycopy.setBounds(b)
+            mycopy.setBounds(b, isGeneric=isGeneric[0])
         except CDMSError:
             b = mycopy.genGenericBounds()
-            mycopy.setBounds(b)
+            mycopy.setBounds(b, isGeneric=False)
         for k, v in self.attributes.items():
            setattr(mycopy, k, v)
         return mycopy
@@ -1570,7 +1600,13 @@ class Axis(AbstractAxis):
         return self._node_.dataRepresent==cdmsNode.CdLinear
 
     # Return the bounds array, or generate a default if autoBounds mode is on
-    def getBounds(self):
+    def getBounds(self, isGeneric=None):
+        '''
+        If isGeneric is a list with one element, we set its element to True if the
+        bounds were generated and False if bounds were read from the file.
+        '''
+        if (isGeneric):
+            isGeneric[0] = False
         boundsArray = self.getExplicitBounds()
         try:
             self.validateBounds(boundsArray)
@@ -1578,6 +1614,8 @@ class Axis(AbstractAxis):
             boundsArray = None
         abopt = getAutoBounds()
         if boundsArray is None and (abopt==1 or (abopt==2 and (self.isLatitude() or self.isLongitude()))) :
+            if (isGeneric):
+                isGeneric[0] = True
             boundsArray = self.genGenericBounds()
             
         return boundsArray
@@ -1609,7 +1647,10 @@ class Axis(AbstractAxis):
 # In-memory coordinate axis
 class TransientAxis(AbstractAxis):
     axis_count = 0
-    def __init__(self, data, bounds=None, id=None, attributes=None, copy=0):
+    def __init__(self, data, bounds=None, id=None, attributes=None, copy=0, genericBounds=False):
+        '''
+        genericBounds specifies whether the bounds were generated (True) or read from a file (False)
+        '''
         AbstractAxis.__init__(self, None, None)
         if id is None:
             TransientAxis.axis_count = TransientAxis.axis_count + 1
@@ -1646,7 +1687,8 @@ class TransientAxis(AbstractAxis):
             self._data_ = numpy.array(data)
 
         self._doubledata_ = None
-        self.setBounds(bounds)
+        self._genericBounds_ = genericBounds
+        self.setBounds(bounds, isGeneric=genericBounds)
 
     def __getitem__(self, key):
         return self._data_[key]
@@ -1663,10 +1705,15 @@ class TransientAxis(AbstractAxis):
     def __len__(self):
         return len(self._data_)
 
-    def getBounds(self):
+    def getBounds(self, isGeneric=None):
+        if (isGeneric):
+            isGeneric[0] = self._genericBounds_
         if self._bounds_ is not None:
             return copy.copy(self._bounds_)
         elif (getAutoBounds()==1 or (getAutoBounds()==2 and (self.isLatitude() or self.isLongitude()))):
+            if (isGeneric):
+                isGeneric[0] = True
+            self._genericBounds_ = True
             return self.genGenericBounds()
         else:
             return None
@@ -1675,14 +1722,17 @@ class TransientAxis(AbstractAxis):
         return self._data_
 
     def getExplicitBounds(self):
-        return copy.copy(self._bounds_)
+        if (self._genericBounds_):
+            return None
+        else:
+            return copy.copy(self._bounds_)
 
     # Set bounds. The persistent argument is for compatibility with
     # persistent versions, is ignored. Same for boundsid and index.
     #
     # mf 20010308 - add validate key word, by default do not validate
-    #
-    def setBounds(self, bounds, persistent=0, validate=0, index=None, boundsid=None):
+    # isGeneric is True if the bounds were generated, False if they were read from a file
+    def setBounds(self, bounds, persistent=0, validate=0, index=None, boundsid=None, isGeneric=False):
         if bounds is not None:
             if isinstance(bounds, numpy.ma.MaskedArray):
                 bounds = numpy.ma.filled(bounds)
@@ -1700,9 +1750,11 @@ class TransientAxis(AbstractAxis):
                     bounds2[:,1]=bounds[1::]
                     bounds=bounds2
             self._bounds_ = copy.copy(bounds)
+            self._genericBounds_ = isGeneric
         else:
             if (getAutoBounds()==1 or (getAutoBounds()==2 and (self.isLatitude() or self.isLongitude()))):
                 self._bounds_ = self.genGenericBounds()
+                self._genericBounds_ = True
             else:
                 self._bounds_ = None
 
@@ -1745,7 +1797,7 @@ class TransientVirtualAxis(TransientAxis):
         "Return true iff coordinate values are implicitly defined."
         return 1
 
-    def setBounds(self, bounds):
+    def setBounds(self, bounds, isGeneric=False):
         "No boundaries on virtual axes"
         self._bounds_ = None
 
@@ -1953,13 +2005,19 @@ class FileAxis(AbstractAxis):
         return 0                        # All file axes are vector representation
 
     # Return the bounds array, or generate a default if autobounds mode is set
-    def getBounds(self):
+    # If isGeneric is a list with one element, we set its element to True if the
+    # bounds were generated and False if bounds were read from the file.
+    def getBounds(self, isGeneric=None):
+        if (isGeneric):
+            isGeneric[0] = False
         boundsArray = self.getExplicitBounds()
         try:
             boundsArray = self.validateBounds(boundsArray)
         except Exception,err:
             boundsArray = None
         if boundsArray is None and (getAutoBounds()==1 or (getAutoBounds()==2 and (self.isLatitude() or self.isLongitude()))):
+            if (isGeneric):
+                isGeneric[0] = True
             boundsArray = self.genGenericBounds()
             
         return boundsArray
@@ -1989,7 +2047,8 @@ class FileAxis(AbstractAxis):
     # index in the extended dimension (default is index=0).
     # If the bounds variable is new, use the name boundsid, or 'bounds_<varid>'
     # if unspecified.
-    def setBounds(self, bounds, persistent=0, validate=0, index=None, boundsid=None):
+    # isGeneric is only used for TransientAxis
+    def setBounds(self, bounds, persistent=0, validate=0, index=None, boundsid=None, isGeneric=False):
         if persistent:
             if index is None:
                 if validate:
@@ -2250,7 +2309,7 @@ def axisMatches(axis, specification):
 
        3. an axis object; will match if it is the same object as axis.
     """   
-    if isinstance(specification, types.StringType):
+    if isinstance(specification, basestring):
         s = string.lower(specification)
         s = s.strip()
         while s[0] == '(':
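Net effect of the axis.py changes: auto-generated bounds are no longer reported
by getExplicitBounds(), so AbstractVariable.hasCellData() can distinguish real
cell data from bounds that were merely synthesized. A minimal sketch of the
intended semantics (values are illustrative; setAutoBounds('on') forces bounds
generation for non-lat/lon axes):

    import numpy, cdms2
    cdms2.setAutoBounds('on')      # generate bounds for all axes, not only lat/lon
    ax = cdms2.createAxis(numpy.array([0., 1., 2.]))
    print ax.getBounds()           # generated bounds
    print ax.getExplicitBounds()   # None: generated bounds are not explicit
    bnds = numpy.array([[-0.5, 0.5], [0.5, 1.5], [1.5, 2.5]])
    ax2 = cdms2.createAxis(numpy.array([0., 1., 2.]), bounds=bnds)
    print ax2.getExplicitBounds()  # the bounds passed in: a variable on ax2 has cell data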
diff --git a/Packages/cdms2/Lib/cdscan.py b/Packages/cdms2/Lib/cdscan.py
new file mode 100755
index 0000000000000000000000000000000000000000..59896b1b43a58b5a1192d31915eaa88cf3d13ae2
--- /dev/null
+++ b/Packages/cdms2/Lib/cdscan.py
@@ -0,0 +1,1688 @@
+#!/usr/bin/env python
+
+import sys
+import getopt
+import cdms2
+from cdms2.grid import lookupArray
+from cdms2.axis import calendarToTag, tagToCalendar
+from cdms2.cdmsobj import CdFromObject,CdString,CdScalar,CdFloat,CdDouble,CdShort,CdInt,CdLong
+import numpy
+import string
+import cdtime
+import os.path
+import copy
+import types
+from cdms2 import cdmsNode
+import re
+
+usage = """Usage:
+    cdscan [options] <files>
+
+    Scan a list of files producing a CDMS dataset in XML representation. See Notes below
+    for a more complete explanation.
+
+Arguments:
+
+    <files> is a list of file paths to scan. The files can be listed in any order, and may
+    be in multiple directories.  A file may also be a CDML dataset (.xml or .cdml), in
+    which case the dataset(s) and files are combined into a new dataset.
+
+Options:
+
+    -a alias_file: change variable names to the aliases defined in an alias file.
+                   Each line of the alias file consists of two blank separated
+                   fields: variable_id alias. 'variable_id' is the ID of the variable
+                   in the file, and 'alias' is the name that will be substituted for
+                   it in the output dataset. Only variables with entries in the alias_file
+                   are renamed.
+
+    -c calendar:   either "gregorian", "proleptic_gregorian", "julian", "noleap", or "360_day". Default:
+                   "gregorian". This option should be used with caution, as it will
+                   override any calendar information in the files.
+
+    -d dataset_id: dataset identifier. Default: "none"
+
+    -e newattr:	   Add or modify attributes of a file, variable, or
+		   axis. The form of 'newattr' is either:
+
+		   'var.attr = value' to modify a variable or axis attribute, or
+		   '.attr = value' to modify a global (file) attribute.
+
+		   In either case, 'value' may be quoted to preserve spaces
+		   or force the attribute to be treated as a string. If
+		   'value' is not quoted and the first character is a
+		   digit, it is converted to integer or
+		   floating-point. This option does not modify the input
+		   datafiles. See notes and examples below.
+
+    --exclude var,var,...
+                   Exclude specified variables. The argument
+                   is a comma-separated list of variables containing no blanks.
+                   In contrast to --exclude-file, this skips the variables regardless
+                   of the file(s) in which they are contained, but processes other
+                   variables in the files.
+                   Also see --include.
+
+    --exclude-file pattern
+                   Exclude files with a basename matching the regular expression pattern.
+                   In contrast to --exclude, this skips the file entirely. Multiple patterns
+                   may be listed by separating with vertical bars (e.g. abc|def ). Note
+                   that the match is to the initial part of the basename. For example, the
+                   pattern 'st' matches any basename starting with 'st'.
+
+    -f file_list:  file containing a list of absolute data file names, one per
+                   line. <files> arguments are ignored.
+
+    --forecast     generate a description of a forecast dataset.
+                   This is not compatible with the -i, -r, -t, or -l options.
+                   A file can contain data for exactly one forecast; its
+                   forecast_reference_time (aka run time, analysis time, starting time,
+                   generating time, tau=0 time) is specified by the nbdate,nbsec variables.
+                   Each file's time axis will be interpreted as the forecast_period (aka
+                   tau, the interval from the forecast_reference_time to the current time)
+                   regardless of its units, standard_name, or other attributes.
+
+    -h:            print a help message.
+
+    -i time_delta: scan time as a 'linear' dimension. This is useful if the time dimension
+                   is very long. The argument is the time delta, a float or integer.  For
+                   example, if the time delta is 6 hours, and the reference units are
+                   "hours since xxxx", set the interval delta to 6.  The default value is
+                   the difference of the first two timepoints.
+
+    --ignore-open-error:
+                   Ignore open errors. Print a warning and continue.
+
+    --include var,var,...
+                   Only include specified variables in the output. The argument
+                   is a comma-separated list of variables containing no blanks.
+                   Also see --exclude.
+
+    --include-file pattern
+                   Only include files with a basename matching the regular expression pattern.
+                   In contrast to --include, this skips files entirely if they do not
+                   match the pattern. Multiple patterns
+                   may be listed by separating with vertical bars (e.g. abc|def ). Note
+                   that the match is to the initial part of the basename. For example, the
+                   pattern 'st' matches any basename starting with 'st'.
+
+    -j:		   scan time as a vector dimension. Time values are listed
+		   individually. Turns off the -i option.
+
+    -l levels:     list of levels, comma-separated. Only specify if files are partitioned by
+                   levels.
+
+    -m levelid:    name of the vertical level dimension. The default is the
+                   dimension having a name that starts with "lev", or having
+                   an attribute "axis=Z"; see Note (3).
+
+    --notrim-lat:  Don't trim latitude values (in degrees) to the range [-90..90]. By default
+		   latitude values are trimmed.
+
+    -p template:   Compatibility with pre-V3.0 datasets. 'cdimport -h' describes template strings.
+
+    -q:            quiet mode
+
+    -r time_units: time units of the form "<units> since yyyy-mm-dd hh:mi:ss", where
+                   <units> is one of "year", "month", "day", "hour", "minute", "second".
+                   Trailing fields may be omitted. The default is the units of the first
+                   time dimension found.
+
+    -s suffix_file: Append a suffix to variable names, depending on the directory
+                   containing the data file.  This can be used to distinguish variables
+                   having the same name but generated by different models or ensemble
+                   runs. 'suffix_file' is the name of a file describing a mapping between
+                   directories and suffixes.  Each line consists of two blank-separated
+                   fields: 'directory' 'suffix'. Each file path is compared to the
+                   directories in the suffix file. If the file path is in that directory
+                   or a subdirectory, the corresponding suffix is appended to the variable
+                   IDs in the file. If more than one such directory is found, the first
+                   directory found is used. If no match is made, the variable ids are not
+                   altered.  Regular expressions can be used: see the example in the Notes
+                   section.
+
+    -t timeid:     id of the partitioned time dimension. The default is the name of the time
+                   dimension.
+
+    --time-linear tzero,delta,units[,calendar]
+                   Override the time dimensions(s) with a linear time dimension. The arguments are
+                   a comma-separated list:
+                   
+                   tzero is the initial time point, a floating-point value.
+                   delta is the time delta, floating-point.
+                   units are time units as specified in the [-r] option.
+                   calendar is optional, and is specified as in the [-c] option. If omitted, it
+                     defaults to the value specified by [-c], otherwise as specified in the file.
+
+                   Example: --time-linear '0,1,months since 1980,noleap'
+
+                   Note (6) compares this option with [-i] and [-r]
+
+    --var-locate 'var,file_pattern':
+                   Only scan a variable if the basename of the file matches the pattern. This
+                   may be used to resolve duplicate variable errors. var and file_pattern are
+                   separated by a comma, with no blanks.
+                   
+                   var is the name of the variable
+                   file_pattern is a regular expression following the Python re module syntax.
+
+                   Example: to scan variable ps from files starting with the string 'ps_':
+                     --var-locate 'ps,ps_.*'
+
+    -x xmlfile:    XML filename. By default, output is written to standard output.
+
+Example:
+
+    cdscan -c noleap -d test -x test.xml [uv]*.nc
+    cdscan -d pcmdi_6h -i 0.25 -r 'days since 1979-1-1' *6h*.ctl
+
+Notes:
+
+    (1) The files can be in netCDF, GrADS/GRIB, HDF, or DRS format, and can be listed in
+    any order. Most commonly, the files are the result of a single experiment, and the
+    'partitioned' dimension is time. The time dimension of a variable is the coordinate
+    variable having a name that starts with 'time' or having an attribute "axis='T'". If
+    this is not the case, specify the time dimension with the -t option. The time
+    dimension should be in the form supported by cdtime. If this is not the case (or to
+    override them) use the -r option.
+
+    (2) The basic form of the command is 'cdscan <files>'. By default, the time values are
+    listed explicitly in the output XML. This can cause a problem if the time dimension is
+    very long, say for 6-hourly data. To handle this the form 'cdscan -i delta <files>'
+    may be used. This generates a compact time representation of the form <start, length,
+    delta>. An exception is raised if the time dimension for a given file is not linear.
+
+    (3) Another form of the command is 'cdscan -l lev1,lev2,..,levn <files>'. This asserts
+    that the dataset is partitioned in both time and vertical level dimensions. The level
+    dimension of a variable is the dimension having a name that starts with "lev", or
+    having an attribute "axis=Z". If this is not the case, set the level name with the -m
+    option.
+
+    (4) An example of a suffix file:
+
+    /exp/pr/ncar-a  _ncar-a
+    /exp/pr/ecm-a   _ecm-a
+    /exp/ta/ncar-a  _ncar-a
+    /exp/ta/ecm-a   _ecm-a
+
+    For all files in directory /exp/pr/ncar-a or a subdirectory, the corresponding
+    variable ids will be appended with the suffix '_ncar-a'.  Regular expressions can be
+    used, as defined in the Python 're' module. For example, the previous example can be
+    replaced with the single line:
+
+    /exp/[^/]*/([^/]*) _\g<1>
+
+    Note the use of parentheses to delimit a group. The syntax \g<n> refers to the n-th
+    group matched in the regular expression, with the first group being n=1. The string
+    [^/]* matches any sequence of characters other than a forward slash.
+
+    (5) Adding or modifying attributes with the -e option:
+
+    time.units = "days since 1979-1-1"
+
+    sets the units of all variables/axes to "days since 1979-1-1". Note
+    that since this is done before any other processing is done, it allows
+    overriding of non-COARDS time units.
+
+    .newattr=newvalue
+
+    Set the global file attribute 'newattr' to 'newvalue'.
+
+    (6) The [--time-linear] option overrides the time values in the file(s). The resulting
+    dimension does not have any gaps. In contrast, the [-i], [-r] options use the specified
+    time units (from [-r]), and calendar from [-c] if specified, to convert the file times
+    to the new units. The resulting linear dimension may have gaps.
+
+    In either case, the files are ordered by the time values in the files.
+
+    The [--time-linear] option should be used with caution, as it is applied to all the time
+    dimensions found.
+"""
+
+# Ensure that arrays are fully printed to XML files
+numpy.set_printoptions(threshold=numpy.inf)
+
+calendarMap = tagToCalendar
+
+reverseCalendarMap = calendarToTag
+
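+# Matches the -e argument form '[var].attr = value'. Group 1 (the variable id)
+# may be empty, which denotes a global (file) attribute.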
+attrPattern = re.compile(r'\s*(\w*)\.(\w+)\s*=\s*(.*)$')
+cdms2.setNetcdfUseParallelFlag(0)
+def timestamp():
+    "Generate a timestamp."
+    import time
+    y,m,d,h,mi,s,w,dy,ds = time.gmtime(time.time())
+    return "%d-%d-%d %d:%d:%d"%(y,m,d,h,mi,s)
+
+def timeindex(value, units, basetime, delta, calendar):
+    """ Calculate (t - basetime)/delu
+    where t = reltime(value, units)
+    and delu is the time interval (delta, delunits) (e.g., 1 month).
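+    For example, value=48 with units 'hours since 1979-1-1', basetime
+    'days since 1979-1-1' and delta=1.0 yields 2.0.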
+    """
+    if string.find(units," as ")==-1:
+        tval = cdtime.reltime(value, units)
+    else:
+        tval = cdtime.abstime(value, units)
+    newval = tval.torel(basetime, calendar)
+    if delta is None:
+        return newval.value
+    else:
+        return newval.value/delta
+
+def combineKeys(dict, typedict, timeIsLinear=0, referenceDelta = None, forecast=None):
+    """ Combine dictionary keys into an axis.
+    dict: (i,j) => (path, axisname)
+    typedict is either timedict or levdict or fcdict.
+    timeIsLinear is true iff time has a linear representation.
+    referenceDelta is only used for error checks if timeIsLinear is true.
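+    Returns (axisvalues, name, compressPart, coordToInd, units, partition,
+    linCoordToInd, errorOccurred).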
+    """
+
+    global verbose
+
+    # Sort the projected time, level indices
+    keys = dict.keys()
+    keys.sort()
+
+    axislist = []
+    prevend = None
+    prevpath = None
+    name0 = None
+    compressPart = []
+    partition = []
+    previ = 0
+    firstunits = None
+    prevvals = None
+    coordToInd = {(None,None):(None,None)}
+    linCoordToInd = {(None,None):(None,None)}
+    iadj = None
+    errorOccurred = 0
+    for i0, i1 in keys:
+        path, name = dict[(i0, i1)]
+        if name0 is None:
+            name0 = name
+        values, units, dummy = typedict[(path,name)]
+        if firstunits is None:
+            firstunits = units
+        if prevend is not None and prevend>=i0:
+            if prevend>=i1:
+                if verbose:
+                    print >> sys.stderr,  'Warning, file %s, dimension %s already contains the values in file %s'%(prevpath,name,path)
+                if timeIsLinear:
+                    iind = lookupArray(prevvals, values[0])
+                    jind = lookupArray(prevvals, values[-1])
+                else:
+                    iind = lookupArray(prevvals, i0)
+                    jind = lookupArray(prevvals, i1)
+                if len(values)!=(jind-iind+1):
+                    raise RuntimeError, 'Dimension %s in files %s [len(%s)=%d], %s [len(%s)=%d], is inconsistent'%(name, prevpath, name, (jind-iind+1), path, name, len(values))
+                coordToInd[(i0,i1)] = (iind, jind)
+                
+                prevspart, prevepart = partition[-1]
+                linCoordToInd[(i0,i1)] = (prevspart+iind, prevspart+jind+1)
+                continue
+            else:                       # Fix partial overlap
+                if timeIsLinear:
+                    jind = lookupArray(prevvals, values[0])
+                else:
+                    jind = lookupArray(prevvals, i0)
+                if verbose:
+                    print >> sys.stderr,  'Warning, file %s, dimension %s overlaps file %s, value=%f'%(prevpath,name,path,prevvals[jind])
+                previ, prevj = compressPart[-1]
+                prevj = previ + jind
+                axislist[-1] = prevvals[0:jind]
+                compressPart[-1] = (previ, prevj)
+                coordToInd[(prevvals[0], prevvals[-1])] = (previ, prevj)
+                previ = prevj
+
+                prevspart, prevepart = partition[-1]
+                prevepart = prevspart + jind
+                partition[-1] = (prevspart, prevepart)
+                linCoordToInd[(prevvals[0], prevvals[-1])] = (prevspart, prevepart)
+
+        axislist.append(values)
+        prevend = i1
+        prevpath = path
+        prevj = previ+len(values)
+        compressPart.append((previ, prevj))
+        coordToInd[(i0,i1)] = (previ, prevj)
+
+        if iadj is None:                # partition has to start with 0
+            iadj = int(i0)
+        spart = int(i0) - iadj
+        epart = int(i1) + 1 - iadj
+        partition.append((spart, epart))
+        linCoordToInd[(i0,i1)] = (spart, epart)
+        if timeIsLinear and len(values)!=(epart-spart):
+            # Find the bad values
+            diffs = values[1:]-values[:-1]
+            badindices = numpy.compress(numpy.not_equal(diffs,referenceDelta),range(len(values)))
+            badvalues = numpy.take(values, badindices)
+            if verbose:
+                print >> sys.stderr,  "Error: Missing values in %s after times: %s. Set delta with the -i option or turn off linear mode with the -j option."%(path,str(badvalues))
+            errorOccurred = 1
+
+        prevvals = values
+        previ = prevj
+        
+    fullaxis = numpy.ma.concatenate(axislist)
+    return fullaxis, name0, compressPart, coordToInd, firstunits, partition, linCoordToInd, errorOccurred
+
+def useKeys(dict, typedict, timeIsLinear=0, referenceDelta = None, forecast=None):
+    """ Use dictionary keys for an axis.  This is like combineKeys (same arguments, same return values,
+    was written by simplifying combineKeys), but this doesn't do nearly so much because this is
+    for an axis where there is no splitting across files, hence partitions are not needed.
+    dict: (i,j) => (path, axisname)
+    typedict is either timedict or levdict or fcdict.
+    timeIsLinear is true iff time has a linear representation.
+    referenceDelta is only used for error checks if timeIsLinear is true.
+    """
+    global verbose
+
+    # Sort the projected time, level indices
+    keys = dict.keys()
+    keys.sort()
+
+    axislist = []
+    name0 = None
+#    compressPart = []
+    compressPart = None
+#    partition = []
+    partition = None
+#    previ = 0
+    firstunits = None
+#    coordToInd = {(None,None):(None,None)}
+#    linCoordToInd = {(None,None):(None,None)}
+    coordToInd = None
+    linCoordToInd = None
+    errorOccurred = 0
+    for i0, i1 in keys:
+        path, name = dict[(i0, i1)]
+        if name0 is None:
+            name0 = name
+        values, units, dummy = typedict[(path,name)]
+        if firstunits is None:
+            firstunits = units
+        axislist.append(values)
+#        prevj = previ+len(values)
+#        coordToInd[(i0,i1)] = (previ, prevj)
+
+    fullaxis = numpy.ma.concatenate(axislist)
+    return fullaxis, name0, compressPart, coordToInd, firstunits, partition, linCoordToInd, errorOccurred
+
+def copyDict(dict):
+    """Copy a dictionary-like object dict to a true dictionary"""
+    result = {}
+    for key in dict.keys():
+        result[key] = dict[key]
+
+    return result
+
+def disambig(name, dict, num, comparator, value):
+    """ Make an unique name from name, wrt to the keys in dictionary dict.
+    Try using num first. comparator(value,dict[name]) returns 0 if equal, 1 if not.
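+    For example, if 'time' is already taken by a different axis, the name
+    becomes 'time_<num>', then 'time_<num>_a', 'time_<num>_b', and so on.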
+    """
+    if not dict.has_key(name) or not comparator(value, dict[name]):
+        uniqname = name
+    else:
+        uniqname = '%s_%d'%(name,num)
+        if dict.has_key(uniqname) and comparator(value, dict[uniqname]):
+            trial_name = uniqname
+            for letter in string.lowercase:
+                uniqname = '%s_%s'%(trial_name,letter)
+                if not dict.has_key(uniqname) or not comparator(value, dict[uniqname]):
+                    break
+            else:
+                raise RuntimeError, 'Cannot make axis name unique: '+name
+
+    return uniqname
+
+def compareaxes(axis1, axis2):
+    """Return 0 if equal, 1 if not"""
+    return ((len(axis1)!=len(axis2)) or not numpy.ma.allclose(axis1[:],axis2[:]))
+
+def comparedomains(domain1, domain2):
+    """Return 0 if equal, 1 if not"""
+    if len(domain1)!=len(domain2):
+        return 1
+    for i in range(len(domain1)):
+        item1 = domain1[i]
+        item2 = domain2[i]
+        if type(item1)!=type(item2):
+            return 1
+        if type(item1)==types.StringType:
+            return item1!=item2
+        elif compareaxes(item1, item2):
+            return 1
+    return 0
+
+def compareVarDictValues(val1, val2):
+    return comparedomains(val1[0], val2[0])
+
+def cleanupAttrs(attrs):
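+    """Normalize attribute values: unwrap length-1 numpy arrays to scalars,
+    stringify longer arrays, and drop a missing_value of None."""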
+    for attname in attrs.keys():
+        attval = attrs[attname]
+        if type(attval) is numpy.ndarray:
+            if len(attval)==1:
+                attrs[attname] = attval[0]
+            else:
+                attrs[attname] = str(attval)
+    if attrs.has_key('missing_value') and attrs['missing_value'] is None:
+        del attrs['missing_value']
+
+def validateAttrs(node):
+    """Compare attributes against DTD."""
+
+    global verbose
+
+    if hasattr(node,'datatype'):
+        parenttype = node.datatype
+    else:
+        parenttype = None
+    atts = node.getExternalDict()
+    for attname in atts.keys():
+        (attval,datatype)=atts[attname] # (XML value, datatype)
+        constraint = node.extra.get(attname)
+        if constraint is not None:
+            (scaletype,reqtype)=constraint # (CdScalar|CdArray, required type)
+            if reqtype==CdFromObject:
+                reqtype = parenttype
+            if reqtype!=datatype and datatype==CdString and scaletype==CdScalar:
+                if reqtype in (CdFloat,CdDouble) and type(attval)!=types.FloatType:
+                    try:
+                        attval = string.atof(attval)
+                    except:
+                        if verbose:
+                            print >> sys.stderr,  "Warning: %s=%s should be a float, id=%s"%(attname,attval,node.id),
+                        try:
+                            attval = string.atoi(attval)
+                            attval = float(attval)
+                            if verbose:
+                                print "(Recasting)"
+                            node.setExternalAttr(attname,attval)
+                        except:
+                            if attname in ['modulo', 'add_offset', 'scale_factor']:
+                                if verbose:
+                                    print "(Removing)"
+                                attdict = node.getExternalDict()
+                                del attdict[attname]
+                            else:
+                                if verbose:
+                                    print ""
+                elif reqtype in (CdShort,CdInt,CdLong) and type(attval)!=types.IntType:
+                    try:
+                        attval = string.atoi(attval)
+                    except:
+                        if verbose:
+                            print >> sys.stderr,  "Warning: %s=%s should be an integer, id=%s"%(attname,attval,node.id),
+                        try:
+                            attval = string.atof(attval)
+                            attval = int(attval)
+                            if verbose:
+                                print "(Recasting)"
+                            node.setExternalAttr(attname,attval)
+                        except:
+                            if verbose:
+                                print ""
+
+def cloneWithLatCheck(axis):
+    """Clone an axis, ensuring that latitudes (in degrees) are in the range [-90:90]"""
+
+    global verbose
+    global notrimlat
+
+    axisvals = origvals = axis[:]
+    if axis.isLatitude() and hasattr(axis,"units") and string.lower(axis.units[0:6])=="degree":
+        if notrimlat==0:
+            axisvals = numpy.maximum(-90.0, numpy.minimum(90.0,axisvals))
+        if not numpy.ma.allclose(axisvals, origvals) and verbose:
+            print >> sys.stderr,  "Warning: resetting latitude values: ",origvals," to: ",axisvals
+
+    b = axis.getBounds()
+    mycopy = cdms2.createAxis(copy.copy(axisvals))
+    mycopy.id = axis.id
+    try:
+        mycopy.setBounds(b)
+    except CDMSError:
+        b = mycopy.genGenericBounds()
+        mycopy.setBounds(b)
+    for k, v in axis.attributes.items():
+       setattr(mycopy, k, v)
+    return mycopy
+
+def addAttrs(fobj, eattrs):
+    """Add extra attributes to file/dataset fobj.
+    eattrs has the form [(varid,attr,value), (varid,attr,value), ...]
+    where if varid is '', set the global attribute."""
+    for evar,eattr,evalue in eattrs:
+        if evar=='':
+            fobj.__dict__[eattr] = evalue
+        else:
+            varobj = fobj[evar]
+            if varobj is not None:
+                varobj.__dict__[eattr] = evalue
+
+def setNodeDict(node, dict):
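+    """Copy the entries of dict into node.attribute, tagging each value with
+    a CDMS datatype (CdLong for ints, CdDouble for floats, CdString otherwise)."""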
+    for key in dict.keys():
+        value = dict[key]
+        if (isinstance(value, numpy.integer) or isinstance(value, types.IntType)):
+            datatype = CdLong
+        elif (isinstance(value, numpy.floating) or isinstance(value, types.FloatType)):
+            datatype = CdDouble
+        else:
+            datatype = CdString
+        node.attribute[key]=(value,datatype)
+
+def initialize_filemap( filemap, timedict, levdict, timeid, extendDset, splitOnTime, \
+                        referenceTime, timeIsLinear, referenceDelta, splitOnLevel, \
+                        dirlen, overrideCalendar ):
+    # This function was formerly part of the body of "main".
+    # Initialize filemap : varid => (tc0, tc1, lc0, lc1, path, timeid, levid)
+    # where tc0 is the first time index relative to the reference time, tc1 the last,
+    # lc0 is the first level, lc1 the last, path is the filename, timeid is the id
+    # of the time dimension of the variable, levid is the id of the level dimension
+    #
+    # timedict : (path, timeid) => (timearray, timeunits, calendar)
+    #
+    # levdict : (path, levelid) => (levelarray, levelunits, None)
+    #
+    initfilemap = cdms2.dataset.parseFileMap(extendDset.cdms_filemap)
+    dsetdirec = extendDset.directory
+    for namelist, slicelist in initfilemap:
+        for name in namelist:
+            var  = extendDset[name]
+            timeaxis = var.getTime()
+            if timeaxis is not None and not overrideCalendar:
+                calendar = timeaxis.getCalendar()
+            if splitOnTime and timeaxis is not None:
+                if hasattr(timeaxis, 'name_in_file'):
+                    timeid = timeaxis.name_in_file
+                else:
+                    timeid = timeaxis.id
+                if referenceTime is None:
+                    referenceTime = timeaxis.units
+                if timeIsLinear in [None,1]:
+                    timeIsLinear = timeaxis.isLinear()
+                    if timeIsLinear:
+                        if len(timeaxis)>1:
+                            referenceDelta = timeaxis[1]-timeaxis[0]
+                        else:
+                            referenceDelta = 1.0
+                    else:
+                        referenceDelta = None
+            else:
+                timeid = None
+            levelaxis = var.getLevel()
+            if splitOnLevel and levelaxis is not None:
+                if hasattr(levelaxis, 'name_in_file'):
+                    levid = levelaxis.name_in_file
+                else:
+                    levid = levelaxis.id
+            else:
+                levid = None
+
+            varmaplist = []
+            for t0, t1, lev0, lev1, path in slicelist:
+                fullpath = os.path.join(dsetdirec,path)
+                basepath = fullpath[dirlen:]
+                if t0 is not None:
+                    tc0 = timeindex(timeaxis[t0], timeaxis.units, referenceTime, referenceDelta, calendar)
+                    tc1 = timeindex(timeaxis[t1-1], timeaxis.units, referenceTime, referenceDelta, calendar)
+                    if not timedict.has_key((basepath, timeid)):  # key must match the (path, timeid) store below
+                        values = timeaxis[t0:t1]
+                        timedict[(basepath, timeid)] = (values, timeaxis.units, calendar)
+                else:
+                    tc0 = tc1 = None
+                if lev0 is not None:
+                    lc0 = levelaxis[lev0]
+                    lc1 = levelaxis[lev1-1]
+                    if not levdict.has_key((basepath, levid)):  # key must match the (path, levid) store below
+                        values = levelaxis[lev0:lev1]
+                        levdict[(basepath, levid)] = (values, levelaxis.units, None)
+                else:
+                    lc0 = lc1 = None
+                varmaplist.append((tc0, tc1, lc0, lc1, basepath, timeid, levid, calendar))
+            if filemap.has_key(name):
+                filemap[name].extend(varmaplist)
+            else:
+                filemap[name] = varmaplist
+
+#---------------------------------------------------------------------------------------------
+
+verbose = 1
+
+def main(argv):
+
+    global verbose
+    global notrimlat
+
+    try:
+        args, lastargs = getopt.getopt( \
+            argv[1:], "a:c:d:e:f:hi:jl:m:p:qr:s:t:x:", \
+            ["include=","include-file=","exclude=","exclude-file=","forecast","time-linear=", \
+             "notrim-lat","var-locate=","ignore-open-error" ] )
+    except getopt.error:
+        print >> sys.stderr,  sys.exc_value
+        print >> sys.stderr,  usage
+        sys.exit(0)
+
+    calendar = None
+    calenkey = None
+    timeid = None
+    levelid = None
+    notrimlat = 0
+    referenceTime = None
+    referenceDelta = None
+    readFromFile = 0
+    splitOnTime = 1
+    splitOnLevel = 0
+    datasetid = "none"
+    timeIsLinear = None
+    writeToStdout = 1
+    templatestr = None
+    timeIsVector = None
+    modelMapFile = None
+    aliasMapFile = None
+    overrideCalendar = 0
+    extraAttrs = []
+    extraDict = {}
+    includeList = None
+    excludeList = None
+    overrideTimeLinear = None
+    varLocate = None
+    ignoreOpenError = False
+    excludePattern = None
+    includePattern = None
+    forecast = False
+    for flag, arg in args:
+        if flag=='-a':
+            aliasMapFile = arg
+        elif flag=='-c':
+            calenkey = string.lower(arg)
+            calendar = calendarMap[calenkey]
+            overrideCalendar = 1
+        elif flag=='-d':
+            datasetid = arg
+        elif flag=='-e':
+            matchObj = attrPattern.match(arg)
+            if matchObj is None:
+                raise RuntimeError, "Expression must have form '[var].attr=value': %s"%arg
+            matchGroups = matchObj.groups()
+            if len(matchGroups)!=3:
+                raise RuntimeError, "Expression must have form '[var].attr=value': %s"%arg
+            matchValue = matchGroups[2]
+            if len(matchValue)>0 and (matchValue[0].isdigit() or matchValue[0] in ['"',"'","-","+"]): #"
+                matcheval = eval(matchValue)
+            else:
+                matcheval = str(matchValue)
+            extraAttrs.append((matchGroups[0], matchGroups[1], matcheval))
+        elif flag=='--exclude':
+            if arg[0]=='-':
+                raise RuntimeError, "--exclude option requires an argument"
+            excludeList = string.split(arg,',')
+        elif flag=='--exclude-file':
+            excludePattern = arg
+        elif flag=='-f':
+            readFromFile = 1
+            filelistpath = arg
+        elif flag=='--forecast':  # experimental forecast mode
+            forecast = True
+            splitOnTime = 0
+            splitOnLevel = 0
+        elif flag=='-h':
+            print usage
+            sys.exit(0)
+        elif flag=='-i':
+            splitOnTime = 1
+            referenceDelta = string.atof(arg)
+            timeIsLinear = 1
+            timeIsVector = None
+        elif flag=='--ignore-open-error':
+            ignoreOpenError = True
+        elif flag=='--include':
+            if arg[0]=='-':
+                raise RuntimeError, "--include option requires an argument"
+            includeList = string.split(arg,',')
+        elif flag=='--include-file':
+            includePattern = arg
+        elif flag=='-j':
+            timeIsVector = 1
+            timeIsLinear = None
+        elif flag=='-l':
+            splitOnLevel = 1
+            levelstr = string.split(arg,',')
+            levellist = map(string.atof, levelstr)
+            levels = numpy.array(levellist)
+            levels = numpy.sort(levels)
+        elif flag=='-m':
+            levelid = arg
+            args.append(('-e','%s.axis=Z'%levelid)) # Add axis=Z attribute
+        elif flag=='--notrim-lat':
+            notrimlat = 1
+        elif flag=='-p':
+            templatestr = arg
+        elif flag=='-q':
+            verbose = 0
+        elif flag=='-r':
+            splitOnTime = 1
+            referenceTime = arg
+        elif flag=='-s':
+            modelMapFile = arg
+        elif flag=='-t':
+            splitOnTime = 1
+            timeid = arg
+            args.append(('-e','%s.axis=T'%timeid)) # Add axis=T attribute
+        elif flag=='--time-linear':
+            targlist = string.split(arg,',')
+            ttzero = string.atof(targlist[0])
+            tdelta = string.atof(targlist[1])
+            tunits = string.strip(targlist[2])
+            if len(targlist)==4:
+                tcalendar = string.strip(targlist[3])
+            else:
+                tcalendar = None
+            overrideTimeLinear = [ttzero,tdelta,tunits,tcalendar]
+        elif flag=='--var-locate':
+            if varLocate is None:
+                varLocate = {}
+            vname, pattern = string.split(arg,',')
+            varLocate[vname]=pattern
+        elif flag=='-x':
+            writeToStdout = 0
+            xmlpath = arg
+
+    # If overriding time, process time as vector so that no gaps result
+    if overrideTimeLinear is not None:
+        timeIsVector = 1
+        timeIsLinear = None
+        if overrideCalendar==1:
+            overrideTimeLinear[3]=calenkey
+
+    if verbose:
+        print 'Finding common directory ...'
+    if readFromFile:
+        f = open(filelistpath)
+        lastargs = f.readlines()
+        f.close()
+
+    # Ignore blank paths
+    realargs = []
+    for arg in lastargs:
+        sarg = string.strip(arg)
+        if len(sarg)>0:
+            realargs.append(sarg)
+    lastargs = realargs
+
+    # Split lastargs into files and datasets
+    fileargs = []
+    dsetargs = []
+    for arg in lastargs:
+        base, suffix = os.path.splitext(arg)
+        if string.lower(suffix) in ['.xml','.cdml']:
+            dsetargs.append(arg)
+        else:
+            fileargs.append(arg)
+
+    # Generate a list of pathnames for datasets
+    dsetfiles = []
+    for path in dsetargs:
+        dset = cdms2.open(path)
+        if not hasattr(dset, 'cdms_filemap'):
+            raise RuntimeError,'Dataset must have a cdms_filemap attribute: '+path
+        if not hasattr(dset, 'directory'):
+            raise RuntimeError,'Dataset must have a directory attribute: '+path
+        dsetdirec = dset.directory
+        initfilemap = cdms2.dataset.parseFileMap(dset.cdms_filemap)
+        for namelist, slicelist in initfilemap:
+            for t0, t1, lev0, lev1, path in slicelist:
+                dsetfiles.append(os.path.join(dsetdirec, path))
+    augmentedArgs = fileargs + dsetfiles
+
+    # Find the common directory
+    directory = os.path.commonprefix(augmentedArgs)
+    firstpath = augmentedArgs[0][len(directory):]
+    if not os.path.isfile(os.path.join(directory,firstpath)):
+        dnew = os.path.dirname(directory)
+        if len(dnew)>0 and directory[len(dnew)]=='/':
+            directory = dnew+'/'
+        else:
+            directory = dnew
+    if verbose:
+        print 'Common directory:',directory
+
+    dirlen = len(directory)
+
+    if templatestr is not None:
+        if os.path.isabs(templatestr):
+            templatestr = templatestr[dirlen:]
+        templatere, ignore = cdms2.cdmsobj.templateToRegex(templatestr)
+        template = re.compile(templatere+'$')
+    else:
+        template = None
+
+    axisdict = {}
+    vardict = {}
+    filemap = {}
+    timedict = {}
+    levdict = {}
+    fcdict = {}
+    global_attrs = None
+    fctau0 = None
+
+    if modelMapFile is not None:
+        mfile = open(modelMapFile)
+        modelMap = {}
+        modelDirs = []
+        for line in mfile.readlines():
+            mdirec, model = string.split(line)
+            modelMap[mdirec] = model
+            modelDirs.append(mdirec)
+        mfile.close()
+
+    if aliasMapFile is not None:
+        afile = open(aliasMapFile)
+        aliasMap = {}
+        for line in afile.readlines():
+            if line[0] not in ["'",'"']: #"
+                varid, alias = string.split(line)
+            else:
+                dummy, varid, alias = string.split(line,line[0])
+                alias = string.strip(alias)
+            aliasMap[varid] = alias
+        afile.close()
+
+    # Save extra attribute information for new axes
+    for evar, eattr, evalue in extraAttrs:
+        if evar=='':
+            continue
+        if extraDict.has_key(evar):
+            curval = extraDict[evar]
+            curval.append((eattr,evalue))
+        else:
+            extraDict[evar] = [(eattr,evalue)]
+
+    #---------------------------------------------------------------------------------------------
+    # Initialize dictionaries if adding to an existing dataset
+    if verbose and len(dsetargs)>0:
+        print 'Scanning datasets ...'
+    for extendPath in dsetargs:
+        if verbose:
+            print extendPath
+        extendDset = cdms2.open(extendPath)
+
+        # Add/modify attributes
+        addAttrs(extendDset, extraAttrs)
+
+        # Copy the global attribute dictionary if necessary. Note that copy.copy
+        # can't be used here, since .attributes is now a 'fake' dictionary.
+        if global_attrs is None:
+            global_attrs = copyDict(extendDset.attributes)
+
+        # Initialize filemap : varid => (tc0, tc1, lc0, lc1, path, timeid, levid)
+        # where tc0 is the first time index relative to the reference time, tc1 the last,
+        # lc0 is the first level, lc1 the last, path is the filename, timeid is the id
+        # of the time dimension of the variable, levid is the id of the level dimension
+        # 
+        # timedict : (path, timeid) => (timearray, timeunits, calendar)
+        #
+        # levdict : (path, levelid) => (levelarray, levelunits, None)
+        #
+        initialize_filemap( filemap, timedict, levdict, timeid, extendDset, splitOnTime, \
+                            referenceTime, timeIsLinear, referenceDelta, splitOnLevel, \
+                            dirlen, overrideCalendar )
+
+        # axisdict : id => transient_axis
+        #   for non-partitioned axes only
+        #
+        tempmap = {}
+        for axis in extendDset.axes.values():
+            if not ( (splitOnTime and (axis.isTime() or axis.id==timeid)) or \
+                     (splitOnLevel and (axis.isLevel() or axis.id==levelid)) ):
+                axis = cloneWithLatCheck(axis)
+                if axisdict.has_key(axis.id):
+                    currentaxis = axisdict[axis.id]
+
+                    # Check that the axis has the same length and values as the saved value. If not,
+                    # create an unambiguous name in the axis dictionary.
+                    if compareaxes(axis, currentaxis):
+                        sepname = disambig(axis.id, axisdict, len(axis), compareaxes, axis)
+                        axis.name_in_file = axis.id
+                        oldid = axis.id
+                        axis.id = sepname
+                        axisdict[sepname] = axis
+                        tempmap[oldid] = sepname
+                else:
+                    axisdict[axis.id] = axis
+
+        # vardict : varid => [domain, attributeDict, typecode]
+        #   where domain = [axis_or_id, axis_or_id,...]
+        #   and axis_or_id is the id of a partitioned dimension, or
+        #   the transient axis object associated with a non-partitioned dimension
+        #
+        for var in extendDset.variables.values():
+            tempdomain = []
+            for id in var.getAxisIds():
+                if tempmap.has_key(id):
+                    id = tempmap[id]
+                if axisdict.has_key(id):
+                    tempdomain.append(axisdict[id])
+                else:
+                    axis = extendDset[id]
+                    if hasattr(axis,'name_in_file'):
+                        id = axis.name_in_file
+                    tempdomain.append(id)
+            varattrs = copyDict(var.attributes)
+            vardict[var.id] = [tempdomain, varattrs, var.typecode()]
+
+        extendDset.close()
+
+        # end of loop "for extendPath in dsetargs"
+
+    #---------------------------------------------------------------------------------------------
+    if verbose:
+        print 'Scanning files ...'
+
+    boundsmap = {}                      # boundsmap : varid => timebounds_id
+    boundsdict = {}                     # Same as vardict for time bounds
+    for path in fileargs:
+        path = string.strip(path)
+
+        # Check if the path is included
+        if includePattern is not None:
+            base = os.path.basename(path)
+            mobj = re.match(includePattern, base)
+            if mobj is None:
+                continue
+
+        # Check if the path is excluded
+        if excludePattern is not None:
+            base = os.path.basename(path)
+            mobj = re.match(excludePattern, base)
+            if mobj is not None:
+                continue
+
+        if verbose:
+            print path
+        try:
+            f = cdms2.open(path)
+        except:
+            if not ignoreOpenError:
+                raise RuntimeError,'Error opening file '+path
+            else:
+                print >> sys.stderr,  'Warning: cannot open file, skipping: %s'%path
+                continue
+
+        # Add/modify attributes
+        addAttrs(f, extraAttrs)
+
+        # Determine the variable ID suffix, if any
+        varsuffix = None
+        if modelMapFile is not None:
+            for direc in modelDirs:
+                mo = re.match(direc, path)
+                if mo is not None:
+                    suffixPattern = modelMap[direc]
+                    def gensuffix(m, mo=mo):
+                        i = string.atoi(m.group(1))
+                        return mo.group(i)
+                    varsuffix = re.sub(r'\\g<(\d)>', gensuffix, suffixPattern)
+                    break
+
+        # Copy the global attribute dictionary if necessary. Note that copy.copy
+        # can't be used here, since .attributes is now a 'fake' dictionary.
+        if global_attrs is None:
+            global_attrs = copyDict(f.attributes)
+
+        basepath = path[dirlen:]
+        if template is not None and template.match(basepath) is None:
+            if verbose:
+                print >> sys.stderr,  'Warning: path %s does not match template %s'%(basepath, templatestr)
+
+        # Find time boundary variables
+        boundsids = []
+        if splitOnTime:
+            tmpdict = {}
+            for axisname in f.axes.keys():
+                axis = f[axisname]
+                #was if axis.isTime() and hasattr(axis, 'bounds'):
+                if axis.isTime() and (axis.getBounds() is not None):
+                    tmpdict[axis.bounds] = 1
+            boundsids = tmpdict.keys()
+
+        # For forecasts, get the time at which the forecast begins (tau=0) which
+        # is nbdate,nbsec
+        if forecast:
+            nbdate = numpy.int( f('nbdate') )  # f('nbdate') is numpy.int32 which gets truncated
+            nbsec = f('nbsec')
+            fctau0 = nbdate*100000 + nbsec  # hopefully nbsec<(seconds per day)=86400<100000
+            fctau0time = cdtime.abstime( nbdate,"day as %Y%m%d" )
+            fctau0time = fctau0time.add( nbsec, cdtime.Seconds )  # fctau0 as type comptime
+            fc_time_attrs = []
+
+        varnames = f.variables.keys()
+
+        # Try to force all axes to be included, but only small ones, length<100.
+        # This section was motivated by a need to preserve the cloud axes isccp_prs,isccp_tau.
+        # If we ever need to preserve longer axes as well, we could create one variable per axis...
+        crude_var_axes = [ [ ax[0] for ax in var.getDomain() ] for var in f.variables.values() ]
+        var_axes = set().union( *crude_var_axes )
+        other_axes = list( set(f.axes.values()) - var_axes )
+        if len(other_axes)>0:
+            other_axes = [ax for ax in other_axes if len(ax)<100]
+            other_axes.sort( key=(lambda ax:ax.id) )
+            axisvar = cdms2.createVariable( numpy.ones([len(ax) for ax in other_axes]),
+                                            axes=other_axes, id='allaxesdummy')
+            axisvar.autoApiInfo = None    # all CdmsObj objects have this attribute, but for unknown
+            # reasons datasetnode.dump() fails trying to dump this attribute's default value (jfp)
+            varnames.append( axisvar.id )
+        # ...try to force all axes to be considered
+
+        varnames.sort()
+        for varname in varnames:
+
+            # If --var-locate is specified for the variable, match the basename before processing
+            if varLocate is not None and varLocate.has_key(varname):
+                varpattern = varLocate[varname]
+                base = os.path.basename(path)
+                mobj = re.match(varpattern, base)
+                if mobj is None:
+                    continue
+
+            # was var = f.variables[varname]
+            if varname=='allaxesdummy':
+                var = axisvar
+            else:
+                var = f.variables[varname]
+
+            # Reset the variable ID to any specified alias
+            if aliasMapFile is not None:
+                varalias = aliasMap.get(var.id)
+                if varalias is not None:
+                    var.name_in_file = var.id
+                    var.id = varalias
+                    varname = varalias
+
+            # Append a suffix to the variable ID, if applicable
+            if varsuffix is not None:
+                if not hasattr(var, 'name_in_file'):
+                    var.name_in_file = var.id
+                var.id += varsuffix
+                varname += varsuffix
+
+            varentry = [None]*9         # [timestart, timeend, levstart, levend, path, timeid, levid, calendar, fctau0]
+            varentry[4] = basepath
+            varentry[8] = fctau0
+
+            # Generate a temporary domain entry, and
+            # create axis dictionary entries.
+            domain = var.getDomain()
+            if forecast:
+                tempdomain = ['fctau0']
+            else:
+                tempdomain = []         # List of axis names and/or objects (if not partitioned)
+            for axis, start, length, truelen in domain:
+                if (splitOnTime and (axis.isTime() or axis.id==timeid)) or \
+                   (splitOnLevel and (axis.isLevel() or axis.id==levelid)):
+                    tempdomain.append(axis.id)
+                elif forecast and  (axis.isTime() or axis.id==timeid):
+                    # time axis isn't split but needs special treatment for forecasts
+                    tempdomain.append(axis.id)
+                    fc_time_attrs.append(axis.attributes)
+                else:
+                    axis = cloneWithLatCheck(axis) # Transient copy
+                    if axisdict.has_key(axis.id):
+                        currentaxis = axisdict[axis.id]
+
+                        # Check that the axis has the same length and values as the saved value. If not,
+                        # create an unambiguous name in the axis dictionary.
+                        if compareaxes(axis, currentaxis):
+                            sepname = disambig(axis.id, axisdict, len(axis), compareaxes, axis)
+                            axis.name_in_file = axis.id
+                            axis.id = sepname
+
+                            # Fix boundary variable names if using suffixes.
+                            if varsuffix is not None and hasattr(axis, 'bounds'):
+                                axis.bounds += varsuffix
+                            axisdict[sepname] = axis
+                        else:
+                            axis = currentaxis
+                    else:
+                        # Fix boundary variable names if using suffixes.
+                        if varsuffix is not None and hasattr(axis, 'bounds'):
+                            axis.bounds += varsuffix
+                        axisdict[axis.id] = axis
+                    tempdomain.append(axis)
+
+            # Create a dictionary entry for the variable if not already there.
+            if var.id in boundsids:
+                boundsattrs = copyDict(var.attributes)
+                boundsdict[var.id] = [tempdomain, boundsattrs, var.typecode()]
+                continue                # Don't set a filemap entry until axes are sorted out
+            elif not vardict.has_key(var.id):
+                varattrs = copyDict(var.attributes)
+                if varsuffix is not None or aliasMapFile is not None:
+                    varattrs['name_in_file'] = var.name_in_file
+                vardict[var.id] = [tempdomain, varattrs, var.typecode()]
+            else:
+                currentdomain, attrs, tcode = vardict[var.id]
+                if comparedomains(currentdomain, tempdomain):
+                    sepname = disambig(var.id, vardict, var.size(), compareVarDictValues, (tempdomain, None))
+                    saveid = var.id
+                    varname  = var.id = sepname
+                    varattrs = copyDict(var.attributes)
+                    var.name_in_file = varattrs['name_in_file']  = saveid
+                    vardict[sepname] = [tempdomain, varattrs, var.typecode()]
+
+            # Create a filemap entry for this variable/file, if split on time or forecast
+            axisids = map(lambda x: x[0].id, var.getDomain())
+            if splitOnTime or forecast:
+                vartime = None
+                if timeid is not None:
+                    if timeid in axisids:
+                        vartime = f.axes.get(timeid)
+                    else:
+                        if verbose:
+                            print >> sys.stderr,  'Warning, time axis %s not found, -t option ignored'%timeid
+                if vartime is None:
+                    vartime = var.getTime()
+                if vartime is not None:
+                    if not overrideCalendar:
+                        calendar = vartime.getCalendar()
+                    if referenceTime is None:
+                        referenceTime = vartime.units
+                    if verbose and not forecast:
+                        print 'Setting reference time units to', referenceTime
+                    if timeIsLinear is None and timeIsVector is None:
+                        timeIsLinear = (string.lower(string.split(referenceTime)[0]) in ['hour','hours','minute','minutes','second','seconds'])
+                        if timeIsLinear and verbose:
+                            print 'Setting time representation to "linear"' #'
+                    if timeIsLinear and referenceDelta is None:
+                        if len(vartime)>1:
+                            time1 = timeindex(vartime[1], vartime.units, referenceTime, None, calendar)
+                            time0 = timeindex(vartime[0], vartime.units, referenceTime, None, calendar)
+                            referenceDelta = time1 - time0
+                        else:
+                            referenceDelta = 1
+                        if verbose:
+                            print 'Setting time delta to', referenceDelta
+
+#                    starttime = vartime[0]
+#                    endtime = vartime[-1]
+                    startindex = timeindex(vartime[0], vartime.units, referenceTime, referenceDelta, calendar)
+                    endindex = timeindex(vartime[-1], vartime.units, referenceTime, referenceDelta, calendar)
+                    if forecast:
+                        # split on forecast, hence no split on time 
+                        varentry[0] = None
+                        varentry[1] = None
+                        referenceTime = None
+                    else:
+                        varentry[0] = startindex
+                        varentry[1] = endindex
+                    varentry[5] = vartime.id
+                    varentry[7] = calendar
+
+                    if not timedict.has_key((basepath,vartime.id)):
+                        values = vartime[:]
+                        timedict[(basepath,vartime.id)] = (values, vartime.units, calendar)
+
+            if splitOnLevel:
+                varlev = None
+                if (levelid is not None) and (levelid in axisids):
+                    varlev = f.axes.get(levelid)
+                if varlev is None:
+                    varlev = var.getLevel()
+                if varlev is not None:
+                    startlev = varlev[0]
+                    if type(startlev) is numpy.ndarray:
+                        startlev = startlev[0]
+                    endlev = varlev[-1]
+                    if type(endlev) is numpy.ndarray:
+                        endlev = endlev[0]
+                    varentry[2] = startlev
+                    varentry[3] = endlev
+                    varentry[6] = varlev.id
+
+                    if not levdict.has_key((basepath, varlev.id)):
+                        values = varlev[:]
+                        levdict[(basepath,varlev.id)] = (values, varlev.units, None)
+
+            if forecast:
+                if not fcdict.has_key((basepath, 'fctau0')):
+                    fcdict[(basepath, 'fctau0')] = ( [fctau0], None, None )
+
+            if filemap.has_key(varname):
+                filemap[varname].append(tuple(varentry))
+            else:
+                filemap[varname] = [tuple(varentry)]
+
+            # Set boundsmap : varid => timebounds_id
+            #was if splitOnTime and vartime is not None and hasattr(vartime, "bounds") and not boundsmap.has_key(varname):
+            if splitOnTime and vartime is not None and (vartime.getBounds() is not None) and\
+                    not boundsmap.has_key(varname):
+                boundsmap[varname] = vartime.bounds
+
+            # End of loop "for varname in varnames"
+
+        f.close()
+        # End of loop "for path in fileargs"
+
+    #---------------------------------------------------------------------------------------------
+
+    # Generate varindex, by combining variable names with
+    # identical varentry values.
+    varindex = []
+    varnames = filemap.keys()
+    varnames.sort()
+    for varname in varnames:
+        varentry = filemap[varname]
+        varentry.sort()
+
+        for varindexname, varindexvalue in varindex:
+            if varentry == varindexvalue:
+                varindexname.append(varname)
+                break
+        else:
+            varindex.append(([varname],varentry))
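+        # (Python's for/else: the else branch runs only when no existing
+        # varindex entry matched varentry, so a new group is started.)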
+
+    # If a variable is not a function of one of the partitioned dimensions,
+    # no indexing is necessary: just read from the first file containing it.
+    for varlist, slicelist in varindex:
+        slice0 = slicelist[0]
+        a,b,c,d,path0,timename,levname,calen,fctau0 = slice0
+        if (a,b,c,d,fctau0)==(None,None,None,None,None):
+            del slicelist[1:]
+
+    # Change times to constant units
+    sameCalendars = 1                   # True iff all time calendars are the same
+    prevcal = None
+    if forecast:
+        # The data files' time axis is interpreted to be tau time, i.e. the forecast_period.
+        # Find the axis, and remember it in timedict.
+        for key in timedict.keys():
+            values, units, calendar = timedict[key]
+            if prevcal is not None and calendar != prevcal:
+                sameCalendars = 0
+            prevcal = calendar
+            if string.find(units," as ")==-1:
+                time0 = cdtime.reltime(values[0],units)
+            else:
+                time0 = cdtime.abstime(values[0],units)
+            offset = time0.torel( units, calendar ).value  # normally will be 0
+            values = values+offset-values[0]
+            # Switch units from "normal" time such as "days since 2001-06-01"
+            # to "basic" time such as "days", which makes sense for a forecast_period.
+            baslen = time0.units.find(' since ')
+            basic_units = time0.units[0:baslen]  # e.g. 'days'
+            fc_units = basic_units
+            timedict[key] = (values, fc_units, calendar)
+    else:       # splitOnTime is true
+        for key in timedict.keys():
+            values, units, calendar = timedict[key]
+            if prevcal is not None and calendar != prevcal:
+                sameCalendars = 0
+            prevcal = calendar
+            if string.find(units," as ")==-1:
+                time0 = cdtime.reltime(values[0],units)
+            else:
+                time0 = cdtime.abstime(values[0],units)
+            offset = time0.torel(referenceTime, calendar).value
+            values = values+offset-values[0]
+            timedict[key] = (values, referenceTime, calendar)
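+    # A sketch of the conversion above, with hypothetical units: a file whose time
+    # units are "days since 2001-06-01", scanned with referenceTime
+    # "days since 2001-01-01", gets
+    #   offset = cdtime.reltime(0., "days since 2001-06-01").torel("days since 2001-01-01").value
+    # i.e. 151.0, shifting its time values onto the common reference units.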
+
+    if sameCalendars and prevcal is not None:
+        calenkey = reverseCalendarMap[calendar]
+        
+    if forecast:
+        # For forecasts, make sure that the above has made all timedict values the same.
+        # >>> It's conceivable that different forecasts will have different time (really, tau)
+        # >>> axes.  If so, at this point we'll want to merge and mask all the time values, so
+        # >>> that all variables can have the same time axis.  For now, just raise an error
+        # >>> if there are time axis differences at this point.
+        values0,units0,calendar0 = timedict[ timedict.keys()[0] ]
+        timedict_same = all( [ ((values0==values).all() and units0==units and calendar0==calendar) \
+                               for (values,units,calendar) in timedict.values() ] )
+        if not timedict_same:
+            raise CDMSError, 'cdscan found inconsistent time axes in a forecast set'
+        # Earlier we had saved all the time axis attributes.  Keep whatever they have in common.
+        fc_time_attr = fc_time_attrs[0]
+        for fcta in fc_time_attrs:             # go through all time attributes (each a dictionary)
+            for attrn in fc_time_attr.keys():
+                if not fcta.has_key(attrn):
+                    del fc_time_attr[attrn]    # key attrn isn't in all time attributes
+                elif fcta[attrn]!=fc_time_attr[attrn]:
+                    del fc_time_attr[attrn]    # not all time attributes have the same value for attrn
+        # At this point fc_time_attr is the dictionary of those time attributes which are common to
+        # all time axes encountered (in the context of a forecast dataset).
+        # Finally, add the appropriate standard_name to it, if we haven't already gotten one from
+        # the data file.  If the file has anything other than 'forecast_period', it's wrong, but
+        # we'll stick with it anyway.
+        if not 'standard_name' in fc_time_attr.keys():
+            fc_time_attr['standard_name'] = 'forecast_period'
+        
+    # Create partitioned axes
+    axes = []
+    masterCoordToInd = {}               # varkey => (timeCoordToInd, levCoordToInd)
+    errorOccurred = 0
+    for varlist, varentry in varindex:
+
+        # Project time, level indices
+        timeproj = {}
+        levproj = {}
+        fctproj = {}
+        for time0, time1, lev0, lev1, path, timename, levname, calendar, fctau0 in varentry:
+            if timename is not None:
+                timeproj[(time0, time1)] = (path, timename)
+            if levname is not None:
+                try:
+                    levproj[(lev0, lev1)] = (path, levname)
+                except:
+                    print >> sys.stderr,  'Cannot hash level %s range (%f,%f)'%(levname,lev0,lev1)
+                    print >> sys.stderr,  type(lev0)
+                    raise
+            if fctau0 is not None:
+                fctproj[(fctau0,fctau0)] = (path, 'fctau0')
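+        # At this point, e.g., two files each holding 12 monthly timepoints would
+        # give timeproj = {(0,11): (path1,'time'), (12,23): (path2,'time')}
+        # (a sketch with hypothetical paths).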
+
+        # and combine the projected indices into axes
+        timeCoordToInd = None
+        timelinCoordToInd = None
+        if splitOnTime and timename is not None:
+            fullaxis, name, partition, timeCoordToInd, units, opartition, timelinCoordToInd, errflag = \
+                      combineKeys(timeproj, timedict, timeIsLinear, referenceDelta)
+            axes.append( ( varlist,fullaxis,name,partition,timeCoordToInd,units,opartition, \
+                           timelinCoordToInd, calendar ) )
+            if errflag: errorOccurred = 1
+        levCoordToInd = None
+        if splitOnLevel and levname is not None:
+            fullaxis, name, partition, levCoordToInd, units, opartition, levlinCoordToInd, errflag = \
+                      combineKeys(levproj, levdict)
+            axes.append((varlist,fullaxis,name,partition,levCoordToInd,units,opartition,levlinCoordToInd, None))
+            if errflag: errorOccurred = 1
+        fcCoordToInd = None
+        if forecast:
+            fullaxis, name, partition, fcCoordToInd, units, opartition, fclinCoordToInd, errflag = \
+                      combineKeys(fctproj, fcdict, forecast=forecast)
+            axes.append((varlist,fullaxis,name,partition,fcCoordToInd,units,opartition,fclinCoordToInd, None))
+            if errflag: errorOccurred = 1
+            if len(timeproj)>0:     # i.e., if time is in this variable's domain.
+                # The useKeys call is like combineKeys, except that it's for a variable not partitioned
+                # among files.  It just sets up axis data and (in the context of this variable loop)
+                # propagates what's in timedict to every variable with time in its domain.
+                fullaxis, name, partition, timeCoordToInd, units, opartition, timelinCoordToInd, errflag = \
+                          useKeys(timeproj, timedict, timeIsLinear, referenceDelta)
+                axes.append( (varlist,fullaxis,name,partition,timeCoordToInd,units,opartition, \
+                              timelinCoordToInd, calendar) )
+                if errflag: errorOccurred = 1
+            
+
+        masterCoordToInd[varlist[0]] = (timeCoordToInd, levCoordToInd, timelinCoordToInd, fcCoordToInd)
+
+    if errorOccurred:
+        raise RuntimeError, 'Error(s) determining axis values - see previous message(s)'
+    
+    # Eliminate duplicate axes
+    axes2 = []
+    for vlist1, axis1, name1, partition1, coordToInd1, units1, opartition1, linCoordToInd1, calen1 in axes:
+        for vlist2, axis2, name2, partition2, coordToInd2, units2, opartition2, linCoordToInd2, calen2 in axes2:
+            if len(axis1)==len(axis2) and name1==name2 and partition1==partition2 and units1==units2 and \
+                   numpy.ma.allclose(axis1,axis2)==1 and calen1==calen2:
+                vlist2.extend(vlist1)
+                break
+        else:
+            axes2.append((copy.copy(vlist1),axis1, name1, partition1, coordToInd1, units1, opartition1, \
+                          linCoordToInd1, calen1))
+
+    # For each axis described by axis2, disambiguate its name, create the axis object, etc.
+    assignedBounds = {}
+    for vlist, axis, name, partition, coordToInd, units, opartition, linCoordToInd, calendar in axes2:
+        # print vlist, coordToInd
+        uniqname = disambig(name, axisdict, len(axis), compareaxes, axis)
+        axisobj = cdms2.createAxis(axis)
+        axisobj.name_in_file = name
+        axisobj.id = uniqname
+        axisobj.units = units
+        if forecast and axisobj.isTime():   # For forecasts, give the time axis some saved attributes.
+            for attr in fc_time_attr.keys():
+                if not hasattr(axisobj,attr):
+                    setattr(axisobj,attr,fc_time_attr[attr])
+        if timeIsLinear and axisobj.isTime():
+            axisobj.partition = numpy.ma.ravel(numpy.ma.array(opartition))
+            axisobj.length = axisobj.partition[-1]-axisobj.partition[0]
+            mopartition = numpy.array(opartition)
+            partition_length = numpy.sum(mopartition[:,1]-mopartition[:,0])
+            if partition_length<axisobj.length:
+                axisobj.partition_length = partition_length
+        elif partition is not None:
+            axisobj.partition = numpy.ma.ravel(numpy.ma.array(partition))
+        if axisobj.isTime():
+            axisobj.calendar = reverseCalendarMap[calendar]
+        # axisobj.reference_partition = str(numpy.ma.ravel(numpy.ma.array(opartition)))
+        if not axisdict.has_key(uniqname):
+            axisdict[uniqname] = axisobj
+        for varname in vlist:
+            domain, attributes, tcode = vardict[varname]
+            for i in range(len(domain)):
+                item = domain[i]
+                if type(item)==types.StringType and item==name:
+                    domain[i] = axisobj
+
+        # Add bounds variables to vardict, varindex
+        if axisobj.isTime():
+            reprVar = vlist[0]              # 'Representative' variable having this time axis
+            if boundsmap.has_key(reprVar):
+                boundsname = boundsmap[reprVar]
+                boundsinfo = boundsdict[boundsname]
+                boundsattrs = boundsinfo[1]
+                if uniqname!=name:
+                    boundsattrs['name_in_file'] = boundsname
+                    boundsname = uniqname+'_bnds'
+                if not assignedBounds.has_key(boundsname):
+                    axisobj.bounds = boundsname
+                    for varids, ranges in varindex:
+                        if reprVar in varids:
+                            varids.append(boundsname)
+                    tmpdom = boundsinfo[0]
+                    if type(tmpdom[1])==types.StringType:
+                        bndsobj = tmpdom[0]
+                        boundsdomain = (bndsobj, axisobj)
+                    else:
+                        bndsobj = tmpdom[1]
+                        boundsdomain = (axisobj, bndsobj)
+                    vardict[boundsname] = (boundsdomain, boundsinfo[1], boundsinfo[2])
+                    assignedBounds[boundsname] = 1
+
+    # Collapse like indices in filemap. For example, transform
+    # [x,[[0,10,-,-,file1], [0,10,-,-,file2]]] into
+    # [x,[[0,10,-,-,file1]]]
+    # This occurs for variables such as time boundaries, which are
+    # often duplicated in different files.
+    cdms_filemap_list = []
+    duplicatevars = {}
+    for varindexname, varindexvalue in varindex:
+        timeCoordToInd, levCoordToInd, linCoordToInd, fcCoordToInd = masterCoordToInd[varindexname[0]]
+        newslicedict = {}
+        for time0, time1, lev0, lev1, path, timename, levname, calendar, fctau0 in varindexvalue:
+            if timeCoordToInd is not None:
+                if timeIsLinear:
+                    i0, i1 = linCoordToInd[(time0, time1)]
+                else:
+                    i0, i1 = timeCoordToInd[(time0, time1)]
+            else:
+                i0 = i1 = None
+            if levCoordToInd is not None:
+                j0, j1 = levCoordToInd[(lev0, lev1)]
+            else:
+                j0 = j1 = None
+            if newslicedict.has_key((i0,i1,j0,j1,fctau0)):
+                currentpath = newslicedict[(i0,i1,j0,j1,fctau0)]
+                if not duplicatevars.has_key(tuple(varindexname)):
+                    duplicatevars[tuple(varindexname)] = (currentpath, path)
+            else:
+                newslicedict[(i0,i1,j0,j1,fctau0)] = path
+        keys = newslicedict.keys()
+        keys.sort()
+        newslicelist = []
+        for i0,i1,j0,j1,fctau0 in keys:
+            path = newslicedict[(i0,i1,j0,j1,fctau0)]
+            newslicelist.append([i0, i1, j0, j1, fctau0, path])
+        cdms_filemap_list.append([varindexname, newslicelist])
+
+    # Check if any duplicated variables are a function of longitude or latitude.
+    # Raise an exception if so.
+    illegalvars = []
+    for varlist in duplicatevars.keys():
+        for varname in varlist:
+            if (excludeList is not None) and (varname in excludeList):
+                continue
+            dom, attrs, tcode = vardict[varname]
+            for axisobj in dom:
+                if axisobj.isLatitude() or axisobj.isLongitude():
+                    path1, path2 = duplicatevars[varlist]
+                    illegalvars.append((varname, path1, path2))
+    if len(illegalvars)>0:
+        raise RuntimeError, "Variable '%s' is duplicated, and is a function of lat or lon: files %s, %s"%illegalvars[0]
+        
+    if verbose and len(duplicatevars.values())>0:
+        print >> sys.stderr,  'Duplicate variables:'
+        for varlist in duplicatevars.keys():
+            path1, path2 = duplicatevars[varlist]
+            print >> sys.stderr,  '\t',varlist,'\t',path1,'\t',path2
+
+    # Generate the cdms_filemap attribute
+    cdms_filemap = str(cdms_filemap_list)
+    cdms_filemap = string.replace(cdms_filemap, ' ', '')
+    cdms_filemap = string.replace(cdms_filemap, 'None', '-')
+    cdms_filemap = string.replace(cdms_filemap, '"', '') #"
+    cdms_filemap = string.replace(cdms_filemap, "'", '')
+
+    # Dump to XML
+    datasetnode = cdmsNode.DatasetNode(datasetid)
+    global_attrs['cdms_filemap'] = cdms_filemap
+    global_attrs['directory'] = directory
+    if sameCalendars and calenkey is not None:
+        global_attrs['calendar'] = calenkey
+    elif global_attrs.has_key('calendar'):
+        del global_attrs['calendar']
+    cleanupAttrs(global_attrs)
+    # datasetnode.setExternalDict(global_attrs)
+    setNodeDict(datasetnode, global_attrs)
+    validateAttrs(datasetnode)
+
+    timeWasOverridden = 0
+    keys = axisdict.keys()
+    keys.sort()
+    for key in keys:
+        axis = axisdict[key]
+        tcode = axis.typecode()
+        if tcode in [numpy.float32, numpy.float, numpy.int16, numpy.int32, numpy.int, numpy.intc, numpy.int8]:
+            tcode = numpy.sctype2char(tcode)
+        cdtype = cdmsNode.NumericToCdType[tcode]
+        node = cdmsNode.AxisNode(axis.id, len(axis), cdtype)
+
+        # Override the time axis as a linear axis
+        if axis.isTime() and (overrideTimeLinear is not None):
+            ttzero = overrideTimeLinear[0]
+            ttdelta = overrideTimeLinear[1]
+            axis.units = overrideTimeLinear[2]
+            if overrideTimeLinear[3] is None:
+                axis.calendar = reverseCalendarMap[axis.getCalendar()]
+            else:
+                axis.calendar = overrideTimeLinear[3]
+            linearnode = cdmsNode.LinearDataNode(ttzero, ttdelta, len(axis))
+            node.setLinearData(linearnode)
+            if verbose:
+                if timeWasOverridden==0:
+                    print "Overriding values for axis '%s'"%axis.id
+                else:
+                    print >> sys.stderr,  'Warning, overriding more than one time axis (%s)'%axis.id
+            timeWasOverridden = 1
+
+        # Represent time as linear axis using time values in the file
+        elif axis.isTime() and timeIsLinear:
+            reference_length = axis.partition[-1]-axis.partition[0]
+            linearnode = cdmsNode.LinearDataNode(axis[0], referenceDelta, reference_length)
+            node.setLinearData(linearnode)
+        else:
+            try:
+                node.setData(axis[:])
+            except cdms2.cdmsNode.NotMonotonicError:
+                if verbose:
+                    print >> sys.stderr,  'Warning: Axis values for axis %s are not monotonic:'%axis.id,axis[:]
+                    print >> sys.stderr,  'Warning: Resetting axis %s values to:'%axis.id, numpy.arange(len(axis))
+                node.setData(numpy.arange(len(axis)))
+        axisattrs = copyDict(axis.attributes)
+
+        # Make sure that new axes have attribute mods
+        if extraDict.has_key(key):
+            for eattr, evalue in extraDict[key]:
+                axisattrs[eattr] = evalue
+        cleanupAttrs(axisattrs)
+        # node.setExternalDict(axisattrs)
+        setNodeDict(node, axisattrs)
+        validateAttrs(node)
+        datasetnode.addId(axis.id, node)
+
+    keys = vardict.keys()
+    keys.sort()
+    for key in keys:
+        if (includeList is not None) and (key not in includeList):
+            continue
+        if (excludeList is not None) and (key in excludeList):
+            continue
+        domain, attrs, tcode = vardict[key]
+        if tcode in [numpy.float32, numpy.float, numpy.int16, numpy.int32, numpy.int, numpy.intc, numpy.int8]:
+            tcode = numpy.sctype2char(tcode)
+        domainNode = cdmsNode.DomainNode()
+        cdtype = cdmsNode.NumericToCdType[tcode]
+        node = cdmsNode.VariableNode(key, cdtype, domainNode)
+        cleanupAttrs(attrs)
+        # node.setExternalDict(attrs)
+        setNodeDict(node, attrs)
+        validateAttrs(node)
+        for axis in domain:
+            if hasattr(axis,'length'):
+                length = axis.length
+            else:
+                length = len(axis)
+            try:
+                elemnode = cdmsNode.DomElemNode(axis.id, 0, length)
+            except AttributeError:
+                print >> sys.stderr,  'Axis %s for variable %s does not have attribute "id"'%(`axis`, key)
+                raise                   # elemnode would otherwise be undefined below
+            if hasattr(axis, 'partition_length'):
+                elemnode.setExternalAttr('partition_length',axis.partition_length)
+            domainNode.add(elemnode)
+        datasetnode.addId(key, node)
+
+    # Add the Conventions attribute if not present
+    conventions = datasetnode.getExternalAttr('Conventions')
+    if conventions is None: datasetnode.setExternalAttr('Conventions','')
+    if templatestr is not None:
+        datasetnode.setExternalAttr('template',templatestr)
+
+    # Add/modify history
+    history = datasetnode.getExternalAttr('history')
+    if history is None:
+        history = ""
+    stringargv = reduce(lambda x,y: x+' '+y, argv)
+    stringtime = "\n[%s] "%timestamp()
+    if len(stringargv)<=256:
+        history += stringtime+stringargv
+    else:
+        history += stringtime+stringargv[:256]+" ..."
+    datasetnode.setExternalAttr('history',history)
+
+    ## datasetnode.validate()
+    if writeToStdout:
+        datasetnode.dump()
+    else:
+        datasetnode.dump(xmlpath)
+        if verbose:
+            print xmlpath,'written'
+
+#--------------------------------------------------------------------------------------------------------------------------
+if __name__ == '__main__':
+    main(sys.argv)
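+    # When cdscan is spawned as an MPI child process, notify the parent that the
+    # scan is done; in a standalone run there is no parent communicator, so any
+    # failure here (including a missing mpi4py) is deliberately ignored.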
+    try:
+        from mpi4py import MPI
+        comm = MPI.Comm.Get_parent()
+        comm.send('done', dest=0)
+    except:
+        pass
+        
diff --git a/Packages/cdms2/Lib/grid.py b/Packages/cdms2/Lib/grid.py
index f11ca1764d923153698d053963009c7dd4640b80..9930f263a676ab3312f9705336ffcc4f83fc9013 100644
--- a/Packages/cdms2/Lib/grid.py
+++ b/Packages/cdms2/Lib/grid.py
@@ -496,14 +496,6 @@ class AbstractRectGrid(AbstractGrid):
         else:
             latbnds = lat.genGenericBounds()
 
-        # Stretch latitude bounds to +/- 90.0
-        if ascending:
-            latbnds[0,0] = min(latbnds[0,0],-90.0)
-            latbnds[-1,1] = max(latbnds[-1,1],90.0)
-        else:
-            latbnds[0,0] = max(latbnds[0,0],+90.0)
-            latbnds[-1,1] = min(latbnds[-1,1],-90.0)
-
         # Get longitude bounds
         lon = self.getLongitude()
         if len(lon)>1:
diff --git a/Packages/cdms2/Lib/tvariable.py b/Packages/cdms2/Lib/tvariable.py
index 27cab8156879a543d753028bb062ac01ae52f128..152875adf58480778e567c0cb7b0f59074fff31a 100644
--- a/Packages/cdms2/Lib/tvariable.py
+++ b/Packages/cdms2/Lib/tvariable.py
@@ -384,8 +384,9 @@ class TransientVariable(AbstractVariable,numpy.ma.MaskedArray):
         if n < 0: n = n + self.rank()
         if not isinstance(axis, AbstractAxis):
             raise CDMSError,"copydimension, other not an axis."
-        b = axis.getBounds()
-        mycopy = createAxis(axis[:], b)
+        isGeneric = [False]
+        b = axis.getBounds(isGeneric)
+        mycopy = createAxis(axis[:], b, genericBounds=isGeneric[0])
         mycopy.id = axis.id
         for k, v in axis.attributes.items():
            setattr(mycopy, k, v)
diff --git a/Packages/cdms2/Script/cdscan b/Packages/cdms2/Script/cdscan
deleted file mode 100755
index 59896b1b43a58b5a1192d31915eaa88cf3d13ae2..0000000000000000000000000000000000000000
--- a/Packages/cdms2/Script/cdscan
+++ /dev/null
@@ -1,1688 +0,0 @@
-#!/usr/bin/env python
-
-import sys
-import getopt
-import cdms2
-from cdms2.grid import lookupArray
-from cdms2.axis import calendarToTag, tagToCalendar
-from cdms2.cdmsobj import CdFromObject,CdString,CdScalar,CdFloat,CdDouble,CdShort,CdInt,CdLong
-import numpy
-import string
-import cdtime
-import os.path
-import copy
-import types
-from cdms2 import cdmsNode
-import re
-
-usage = """Usage:
-    cdscan [options] <files>
-
-    Scan a list of files producing a CDMS dataset in XML representation. See Notes below
-    for a more complete explanation.
-
-Arguments:
-
-    <files> is a list of file paths to scan. The files can be listed in any order, and may
-    be in multiple directories.  A file may also be a CDML dataset (.xml or .cdml), in
-    which case the dataset(s) and files are combined into a new dataset.
-
-Options:
-
-    -a alias_file: change variable names to the aliases defined in an alias file.
-                   Each line of the alias file consists of two blank separated
-                   fields: variable_id alias. 'variable_id' is the ID of the variable
-                   in the file, and 'alias' is the name that will be substituted for
-                   it in the output dataset. Only variables with entries in the alias_file
-                   are renamed.
-
-    -c calendar:   either "gregorian", "proleptic_gregorian", "julian", "noleap", or "360_day". Default:
-                   "gregorian". This option should be used with caution, as it will
-                   override any calendar information in the files.
-
-    -d dataset_id: dataset identifier. Default: "none"
-
-    -e newattr:	   Add or modify attributes of a file, variable, or
-		   axis. The form of 'newattr' is either:
-
-		   'var.attr = value' to modify a variable or attribute, or
-		   '.attr = value' to modify a global (file) attribute.
-
-		   In either case, 'value' may be quoted to preserve spaces
-		   or force the attribute to be treated as a string. If
-		   'value' is not quoted and the first character is a
-		   digit, it is converted to integer or
-		   floating-point. This option does not modify the input
-		   datafiles. See notes and examples below.
-
-    --exclude var,var,...
-                   Exclude specified variables. The argument
-                   is a comma-separated list of variables containing no blanks.
-                   In contrast to --exclude-file, this skips the variables regardless
-                   of the file(s) in which they are contained, but processes other
-                   variables in the files.
-                   Also see --include.
-
-    --exclude-file pattern
-                   Exclude files with a basename matching the regular expression pattern.
-                   In contrast to --exclude, this skips the file entirely. Multiple patterns
-                   may be listed by separating with vertical bars (e.g. abc|def ). Note
-                   that the match is to the initial part of the basename. For example, the
-                   pattern 'st' matches any basename starting with 'st'.
-
-    -f file_list:  file containing a list of absolute data file names, one per
-                   line. <files> arguments are ignored.
-
-    --forecast     generate a description of a forecast dataset.
-                   This is not compatible with the -i, -r, -t, or -l options.
-                   A file can contain data for exactly one forecast; its
-                   forecast_reference_time (aka run time, analysis time, starting time,
-                   generating time, tau=0 time) is specified by the nbdate,nbsec variables.
-                   Each file's time axis will be interpreted as the forecast_period (aka
-                   tau, the interval from the forecast_reference_time to the current time)
-                   regardless of its units, standard_name, or other attributes.
-
-    -h:            print a help message.
-
-    -i time_delta: scan time as a 'linear' dimension. This is useful if the time dimension
-                   is very long. The argument is the time delta, a float or integer.  For
-                   example, if the time delta is 6 hours, and the reference units are
-                   "hours since xxxx", set the interval delta to 6.  The default value is
-                   the difference of the first two timepoints.
-
-    --ignore-open-error:
-                   Ignore open errors. Print a warning and continue.
-
-    --include var,var,...
-                   Only include specified variables in the output. The argument
-                   is a comma-separated list of variables containing no blanks.
-                   Also see --exclude.
-
-    --include-file pattern
-                   Only include files with a basename matching the regular expression pattern.
-                   In contrast to --include, this skips files entirely if they do not
-                   match the pattern. Multiple patterns
-                   may be listed by separating with vertical bars (e.g. abc|def ). Note
-                   that the match is to the initial part of the basename. For example, the
-                   pattern 'st' matches any basename starting with 'st'.
-
-    -j:		   scan time as a vector dimension. Time values are listed
-		   individually. Turns off the -i option.
-
-    -l levels:     list of levels, comma-separated. Only specify if files are partitioned by
-                   levels.
-
-    -m levelid:    name of the vertical level dimension. The default is the dimension
-                   whose name starts with "lev", or which has the attribute "axis=Z"
-                   (see Note 3).
-
-    --notrim-lat:  Don't trim latitude values (in degrees) to the range [-90..90]. By default
-		   latitude values are trimmed.
-
-    -p template:   Compatibility with pre-V3.0 datasets. 'cdimport -h' describes template strings.
-
-    -q:            quiet mode
-
-    -r time_units: time units of the form "<units> since yyyy-mm-dd hh:mi:ss", where
-                   <units> is one of "year", "month", "day", "hour", "minute", "second".
-                   Trailing fields may be omitted. The default is the units of the first
-                   time dimension found.
-
-    -s suffix_file: Append a suffix to variable names, depending on the directory
-                   containing the data file.  This can be used to distinguish variables
-                   having the same name but generated by different models or ensemble
-                   runs. 'suffix_file' is the name of a file describing a mapping between
-                   directories and suffixes.  Each line consists of two blank-separated
-                   fields: 'directory' 'suffix'. Each file path is compared to the
-                   directories in the suffix file. If the file path is in that directory
-                   or a subdirectory, the corresponding suffix is appended to the variable
-                   IDs in the file. If more than one such directory is found, the first
-                   directory found is used. If no match is made, the variable ids are not
-                   altered.  Regular expressions can be used: see the example in the Notes
-                   section.
-
-    -t timeid:     id of the partitioned time dimension. The default is the name of the time
-                   dimension.
-
-    --time-linear tzero,delta,units[,calendar]
-                   Override the time dimensions(s) with a linear time dimension. The arguments are
-                   a comma-separated list:
-                   
-                   tzero is the initial time point, a floating-point value.
-                   delta is the time delta, floating-point.
-                   units are time units as specified in the [-r] option.
-                   calendar is optional, and is specified as in the [-c] option. If omitted, it
-                     defaults to the value specified by [-c], otherwise as specified in the file.
-
-                   Example: --time-linear '0,1,months since 1980,noleap'
-
-                   Note (6) compares this option with [-i] and [-r].
-
-    --var-locate 'var,file_pattern':
-                   Only scan a variable if the basename of the file matches the pattern. This
-                   may be used to resolve duplicate variable errors. var and file_pattern are
-                   separated by a comma, with no blanks.
-                   
-                   var is the name of the variable
-                   file_pattern is a regular expression following the Python re module syntax.
-
-                   Example: to scan variable ps from files starting with the string 'ps_':
-                     --var-locate 'ps,ps_.*'
-
-    -x xmlfile:    XML filename. By default, output is written to standard output.
-
-Example:
-
-    cdscan -c noleap -d test -x test.xml [uv]*.nc
-    cdscan -d pcmdi_6h -i 0.25 -r 'days since 1979-1-1' *6h*.ctl
-
-Notes:
-
-    (1) The files can be in netCDF, GrADS/GRIB, HDF, or DRS format, and can be listed in
-    any order. Most commonly, the files are the result of a single experiment, and the
-    'partitioned' dimension is time. The time dimension of a variable is the coordinate
-    variable having a name that starts with 'time' or having an attribute "axis='T'". If
-    this is not the case, specify the time dimension with the -t option. The time
-    dimension should be in the form supported by cdtime. If this is not the case (or to
-    override them) use the -r option.
-
-    (2) The basic form of the command is 'cdscan <files>'. By default, the time values are
-    listed explicitly in the output XML. This can cause a problem if the time dimension is
-    very long, say for 6-hourly data. To handle this the form 'cdscan -i delta <files>'
-    may be used. This generates a compact time representation of the form <start, length,
-    delta>. An exception is raised if the time dimension for a given file is not linear.
-
-    (3) Another form of the command is 'cdscan -l lev1,lev2,..,levn <files>'. This asserts
-    that the dataset is partitioned in both time and vertical level dimensions. The level
-    dimension of a variable is the dimension having a name that starts with "lev", or
-    having an attribute "axis=Z". If this is not the case, set the level name with the -m
-    option.
-
-    (4) An example of a suffix file:
-
-    /exp/pr/ncar-a  _ncar-a
-    /exp/pr/ecm-a   _ecm-a
-    /exp/ta/ncar-a  _ncar-a
-    /exp/ta/ecm-a   _ecm-a
-
-    For all files in directory /exp/pr/ncar-a or a subdirectory, the corresponding
-    variable ids will be appended with the suffix '_ncar-a'.  Regular expressions can be
-    used, as defined in the Python 're' module. For example, the previous suffix file
-    can be replaced with the single line:
-
-    /exp/[^/]*/([^/]*) _\g<1>
-
-    Note the use of parentheses to delimit a group. The syntax \g<n> refers to the n-th
-    group matched in the regular expression, with the first group being n=1. The string
-    [^/]* matches any sequence of characters other than a forward slash.
-
-    (5) Adding or modifying attributes with the -e option:
-
-    time.units = "days since 1979-1-1"
-
-    sets the units of all variables/axes to "days since 1979-1-1". Note
-    that since this is done before any other processing is done, it allows
-    overriding of non-COARDS time units.
-
-    .newattr=newvalue
-
-    Set the global file attribute 'newattr' to 'newvalue'.
-
-    (6) The [--time-linear] option overrides the time values in the file(s). The resulting
-    dimension does not have any gaps. In contrast, the [-i], [-r] options use the specified
-    time units (from [-r]), and calendar from [-c] if specified, to convert the file times
-    to the new units. The resulting linear dimension may have gaps.
-
-    In either case, the files are ordered by the time values in the files.
-
-    The [--time-linear] option should be used with caution, as it is applied to all the time
-    dimensions found.
-"""
-
-# Ensure that arrays are fully printed to XML files
-numpy.set_printoptions(threshold=numpy.inf)
-
-calendarMap = tagToCalendar
-
-reverseCalendarMap = calendarToTag
-
-attrPattern = re.compile(r'\s*(\w*)\.(\w+)\s*=\s*(.*)$')
-cdms2.setNetcdfUseParallelFlag(0)
-def timestamp():
-    "Generate a timestamp."
-    import time
-    y,m,d,h,mi,s,w,dy,ds = time.gmtime(time.time())
-    return "%d-%d-%d %d:%d:%d"%(y,m,d,h,mi,s)
-
-def timeindex(value, units, basetime, delta, calendar):
-    """ Calculate (t - basetime)/delu
-    where t = reltime(value, units)
-    and delu is the time interval (delta, delunits) (e.g., 1 month).
-    """
-    if string.find(units," as ")==-1:
-        tval = cdtime.reltime(value, units)
-    else:
-        tval = cdtime.abstime(value, units)
-    newval = tval.torel(basetime, calendar)
-    if delta is None:
-        return newval.value
-    else:
-        return newval.value/delta
-
-def combineKeys(dict, typedict, timeIsLinear=0, referenceDelta = None, forecast=None):
-    """ Combine dictionary keys into an axis.
-    dict: (i,j) => (path, axisname)
-    typedict is either timedict or levdict or fcdict.
-    timeIsLinear is true iff time has a linear representation.
-    referenceDelta is only used for error checks if timeIsLinear is true.
-    """
-
-    global verbose
-
-    # Sort the projected time, level indices
-    keys = dict.keys()
-    keys.sort()
-
-    axislist = []
-    prevend = None
-    prevpath = None
-    name0 = None
-    compressPart = []
-    partition = []
-    previ = 0
-    firstunits = None
-    prevvals = None
-    coordToInd = {(None,None):(None,None)}
-    linCoordToInd = {(None,None):(None,None)}
-    iadj = None
-    errorOccurred = 0
-    for i0, i1 in keys:
-        path, name = dict[(i0, i1)]
-        if name0 is None:
-            name0 = name
-        values, units, dummy = typedict[(path,name)]
-        if firstunits is None:
-            firstunits = units
-        if prevend is not None and prevend>=i0:
-            if prevend>=i1:
-                if verbose:
-                    print >> sys.stderr,  'Warning, file %s, dimension %s contains values in file %s'%(prevpath,name,path)
-                if timeIsLinear:
-                    iind = lookupArray(prevvals, values[0])
-                    jind = lookupArray(prevvals, values[-1])
-                else:
-                    iind = lookupArray(prevvals, i0)
-                    jind = lookupArray(prevvals, i1)
-                if len(values)!=(jind-iind+1):
-                    raise RuntimeError, 'Dimension %s in files %s [len(%s)=%d], %s [len(%s)=%d], is inconsistent'%(name, prevpath, name, (jind-iind+1), path, name, len(values))
-                coordToInd[(i0,i1)] = (iind, jind)
-                
-                prevspart, prevepart = partition[-1]
-                linCoordToInd[(i0,i1)] = (prevspart+iind, prevspart+jind+1)
-                continue
-            else:                       # Fix partial overlap
-                if timeIsLinear:
-                    jind = lookupArray(prevvals, values[0])
-                else:
-                    jind = lookupArray(prevvals, i0)
-                if verbose:
-                    print >> sys.stderr,  'Warning, file %s, dimension %s overlaps file %s, value=%f'%(prevpath,name,path,prevvals[jind])
-                previ, prevj = compressPart[-1]
-                prevj = previ + jind
-                axislist[-1] = prevvals[0:jind]
-                compressPart[-1] = (previ, prevj)
-                coordToInd[(prevvals[0], prevvals[-1])] = (previ, prevj)
-                previ = prevj
-
-                prevspart, prevepart = partition[-1]
-                prevepart = prevspart + jind
-                partition[-1] = (prevspart, prevepart)
-                linCoordToInd[(prevvals[0], prevvals[-1])] = (prevspart, prevepart)
-
-        axislist.append(values)
-        prevend = i1
-        prevpath = path
-        prevj = previ+len(values)
-        compressPart.append((previ, prevj))
-        coordToInd[(i0,i1)] = (previ, prevj)
-
-        if iadj is None:                # partition has to start with 0
-            iadj = int(i0)
-        spart = int(i0) - iadj
-        epart = int(i1) + 1 - iadj
-        partition.append((spart, epart))
-        linCoordToInd[(i0,i1)] = (spart, epart)
-        if timeIsLinear and len(values)!=(epart-spart):
-            # Find the bad values
-            diffs = values[1:]-values[:-1]
-            badindices = numpy.compress(numpy.not_equal(diffs,referenceDelta),range(len(values)))
-            badvalues = numpy.take(values, badindices)
-            if verbose:
-                print >> sys.stderr,  "Error: Missing values in %s after times: %s. Set delta with the -i option or turn off linear mode with the -j option."%(path,str(badvalues))
-            errorOccurred = 1
-
-        prevvals = values
-        previ = prevj
-        
-    fullaxis = numpy.ma.concatenate(axislist)
-    return fullaxis, name0, compressPart, coordToInd, firstunits, partition, linCoordToInd, errorOccurred
-
-def useKeys(dict, typedict, timeIsLinear=0, referenceDelta = None, forecast=None):
-    """ Use dictionary keys for an axis.  This is like combineKeys (same arguments, same return values,
-    was written by simplifying combineKeys), but this doesn't do nearly so much because this is
-    for an axis where there is no splitting across files, hence partitions are not needed.
-    dict: (i,j) => (path, axisname)
-    typedict is either timedict or levdict or fcdict.
-    timeIsLinear is true iff time has a linear representation.
-    referenceDelta is only used for error checks if timeIsLinear is true.
-    """
-    global verbose
-
-    # Sort the projected time, level indices
-    keys = dict.keys()
-    keys.sort()
-
-    axislist = []
-    name0 = None
-#    compressPart = []
-    compressPart = None
-#    partition = []
-    partition = None
-#    previ = 0
-    firstunits = None
-#    coordToInd = {(None,None):(None,None)}
-#    linCoordToInd = {(None,None):(None,None)}
-    coordToInd = None
-    linCoordToInd = None
-    errorOccurred = 0
-    for i0, i1 in keys:
-        path, name = dict[(i0, i1)]
-        if name0 is None:
-            name0 = name
-        values, units, dummy = typedict[(path,name)]
-        if firstunits is None:
-            firstunits = units
-        axislist.append(values)
-#        prevj = previ+len(values)
-#        coordToInd[(i0,i1)] = (previ, prevj)
-
-    fullaxis = numpy.ma.concatenate(axislist)
-    return fullaxis, name0, compressPart, coordToInd, firstunits, partition, linCoordToInd, errorOccurred
-
-def copyDict(dict):
-    """Copy a dictionary-like object dict to a true dictionary"""
-    result = {}
-    for key in dict.keys():
-        result[key] = dict[key]
-
-    return result
-
-def disambig(name, dict, num, comparator, value):
-    """ Make an unique name from name, wrt to the keys in dictionary dict.
-    Try using num first. comparator(value,dict[name]) returns 0 if equal, 1 if not.
-    """
-    if not dict.has_key(name) or not comparator(value, dict[name]):
-        uniqname = name
-    else:
-        uniqname = '%s_%d'%(name,num)
-        if dict.has_key(uniqname) and comparator(value, dict[uniqname]):
-            trial_name = uniqname
-            for letter in string.lowercase:
-                uniqname = '%s_%s'%(trial_name,letter)
-                if not dict.has_key(uniqname) or not comparator(value, dict[uniqname]):
-                    break
-            else:
-                raise RuntimeError, 'Cannot make axis name unique: %s'%name
-
-    return uniqname
-
-def compareaxes(axis1, axis2):
-    """Return 0 if equal, 1 if not"""
-    return ((len(axis1)!=len(axis2)) or not numpy.ma.allclose(axis1[:],axis2[:]))
-
-def comparedomains(domain1, domain2):
-    """Return 0 if equal, 1 if not"""
-    if len(domain1)!=len(domain2):
-        return 1
-    for i in range(len(domain1)):
-        item1 = domain1[i]
-        item2 = domain2[i]
-        if type(item1)!=type(item2):
-            return 1
-        if type(item1)==types.StringType:
-            if item1!=item2:
-                return 1
-        elif compareaxes(item1, item2):
-            return 1
-    return 0
-
-def compareVarDictValues(val1, val2):
-    return comparedomains(val1[0], val2[0])
-
-def cleanupAttrs(attrs):
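-    # Flatten numpy array attributes: a length-1 array such as numpy.array([1e20])
-    # becomes the scalar 1e20; longer arrays become their string representation.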
-    for attname in attrs.keys():
-        attval = attrs[attname]
-        if type(attval) is numpy.ndarray:
-            if len(attval)==1:
-                attrs[attname] = attval[0]
-            else:
-                attrs[attname] = str(attval)
-    if attrs.has_key('missing_value') and attrs['missing_value'] is None:
-        del attrs['missing_value']
-
-def validateAttrs(node):
-    """Compare attributes against DTD."""
-
-    global verbose
-
-    if hasattr(node,'datatype'):
-        parenttype = node.datatype
-    else:
-        parenttype = None
-    atts = node.getExternalDict()
-    for attname in atts.keys():
-        (attval,datatype)=atts[attname] # (XML value, datatype)
-        constraint = node.extra.get(attname)
-        if constraint is not None:
-            (scaletype,reqtype)=constraint # (CdScalar|CdArray, required type)
-            if reqtype==CdFromObject:
-                reqtype = parenttype
-            if reqtype!=datatype and datatype==CdString and scaletype==CdScalar:
-                if reqtype in (CdFloat,CdDouble) and type(attval)!=types.FloatType:
-                    try:
-                        attval = string.atof(attval)
-                    except:
-                        if verbose:
-                            print >> sys.stderr,  "Warning: %s=%s should be a float, id=%s"%(attname,attval,node.id),
-                        try:
-                            attval = string.atoi(attval)
-                            attval = float(attval)
-                            if verbose:
-                                print "(Recasting)"
-                            node.setExternalAttr(attname,attval)
-                        except:
-                            if attname in ['modulo', 'add_offset', 'scale_factor']:
-                                if verbose:
-                                    print "(Removing)"
-                                attdict = node.getExternalDict()
-                                del attdict[attname]
-                            else:
-                                if verbose:
-                                    print ""
-                elif reqtype in (CdShort,CdInt,CdLong) and type(attval)!=types.IntType:
-                    try:
-                        attval = string.atoi(attval)
-                    except:
-                        if verbose:
-                            print >> sys.stderr,  "Warning: %s=%s should be an integer, id=%s"%(attname,attval,node.id),
-                        try:
-                            attval = string.atof(attval)
-                            attval = int(attval)
-                            if verbose:
-                                print "(Recasting)"
-                            node.setExternalAttr(attname,attval)
-                        except:
-                            if verbose:
-                                print ""
-
-def cloneWithLatCheck(axis):
-    """Clone an axis, ensuring that latitudes (in degrees) are in the range [-90:90]"""
-
-    global verbose
-    global notrimlat
-
-    axisvals = origvals = axis[:]
-    if axis.isLatitude() and hasattr(axis,"units") and string.lower(axis.units[0:6])=="degree":
-        if notrimlat==0:
-            axisvals = numpy.maximum(-90.0, numpy.minimum(90.0,axisvals))
-        if not numpy.ma.allclose(axisvals, origvals) and verbose:
-            print >> sys.stderr,  "Warning: resetting latitude values: ",origvals," to: ",axisvals
-
-    b = axis.getBounds()
-    mycopy = cdms2.createAxis(copy.copy(axisvals))
-    mycopy.id = axis.id
-    try:
-        mycopy.setBounds(b)
-    except CDMSError:
-        b = mycopy.genGenericBounds()
-        mycopy.setBounds(b)
-    for k, v in axis.attributes.items():
-       setattr(mycopy, k, v)
-    return mycopy
-
-def addAttrs(fobj, eattrs):
-    """Add extra attributes to file/dataset fobj.
-    eattrs has the form [(varid,attr,value), (varid,attr,value), ...]
-    where if varid is '', set the global attribute."""
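-    # e.g. (a sketch) addAttrs(f, [('','Conventions','CF-1.0'), ('ps','units','Pa')])
-    # sets a global attribute on f and a variable attribute on f['ps'].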
-    for evar,eattr,evalue in eattrs:
-        if evar=='':
-            fobj.__dict__[eattr] = evalue
-        else:
-            varobj = fobj[evar]
-            if varobj is not None:
-                varobj.__dict__[eattr] = evalue
-
-def setNodeDict(node, dict):
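-    # Each attribute is stored with an inferred CDMS datatype, e.g. (a sketch)
-    # {'comment': 'v1', 'missing_value': 1e20, 'nlev': 17} is stored with the
-    # datatypes CdString, CdDouble, and CdLong respectively.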
-    for key in dict.keys():
-        value = dict[key]
-        if (isinstance(value, numpy.integer) or isinstance(value, types.IntType)):
-            datatype = CdLong
-        elif (isinstance(value, numpy.floating) or isinstance(value, types.FloatType)):
-            datatype = CdDouble
-        else:
-            datatype = CdString
-        node.attribute[key]=(value,datatype)
-
-def initialize_filemap( filemap, timedict, levdict, timeid, extendDset, splitOnTime, \
-                        referenceTime, timeIsLinear, referenceDelta, splitOnLevel, \
-                        dirlen, overrideCalendar ):
-    # This function was formerly part of the body of "main".
-    # Initialize filemap : varid => (tc0, tc1, lc0, lc1, path, timeid, levid, calendar)
-    # where tc0 is the first time index relative to the reference time, tc1 the last,
-    # lc0 is the first level, lc1 the last, path is the filename, timeid is the id
-    # of the time dimension of the variable, levid is the id of the level dimension,
-    # and calendar is the time calendar.
-    #
-    # timedict : (path, timeid) => (timearray, timeunits, calendar)
-    #
-    # levdict : (path, levelid) => (levelarray, levelunits, None)
-    #
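-    # e.g. (a sketch, hypothetical path): after running, filemap['ps'] might hold
-    #   [(0.0, 11.0, None, None, 'ps_1979.nc', 'time', None, cdtime.MixedCalendar)].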
-    initfilemap = cdms2.dataset.parseFileMap(extendDset.cdms_filemap)
-    dsetdirec = extendDset.directory
-    for namelist, slicelist in initfilemap:
-        for name in namelist:
-            var  = extendDset[name]
-            timeaxis = var.getTime()
-            if timeaxis is not None and not overrideCalendar:
-                calendar = timeaxis.getCalendar()
-            if splitOnTime and timeaxis is not None:
-                if hasattr(timeaxis, 'name_in_file'):
-                    timeid = timeaxis.name_in_file
-                else:
-                    timeid = timeaxis.id
-                if referenceTime is None:
-                    referenceTime = timeaxis.units
-                if timeIsLinear in [None,1]:
-                    timeIsLinear = timeaxis.isLinear()
-                    if timeIsLinear:
-                        if len(timeaxis)>1:
-                            referenceDelta = timeaxis[1]-timeaxis[0]
-                        else:
-                            referenceDelta = 1.0
-                    else:
-                        referenceDelta = None
-            else:
-                timeid = None
-            levelaxis = var.getLevel()
-            if splitOnLevel and levelaxis is not None:
-                if hasattr(levelaxis, 'name_in_file'):
-                    levid = levelaxis.name_in_file
-                else:
-                    levid = levelaxis.id
-            else:
-                levid = None
-
-            varmaplist = []
-            for t0, t1, lev0, lev1, path in slicelist:
-                fullpath = os.path.join(dsetdirec,path)
-                basepath = fullpath[dirlen:]
-                if t0 is not None:
-                    tc0 = timeindex(timeaxis[t0], timeaxis.units, referenceTime, referenceDelta, calendar)
-                    tc1 = timeindex(timeaxis[t1-1], timeaxis.units, referenceTime, referenceDelta, calendar)
-                    if not timedict.has_key((basepath, timeid)):
-                        values = timeaxis[t0:t1]
-                        timedict[(basepath, timeid)] = (values, timeaxis.units, calendar)
-                else:
-                    tc0 = tc1 = None
-                if lev0 is not None:
-                    lc0 = levelaxis[lev0]
-                    lc1 = levelaxis[lev1-1]
-                    if not levdict.has_key((basepath, levid)):
-                        values = levelaxis[lev0:lev1]
-                        levdict[(basepath, levid)] = (values, levelaxis.units, None)
-                else:
-                    lc0 = lc1 = None
-                varmaplist.append((tc0, tc1, lc0, lc1, basepath, timeid, levid, calendar))
-            if filemap.has_key(name):
-                filemap[name].extend(varmaplist)
-            else:
-                filemap[name] = varmaplist
-
-#---------------------------------------------------------------------------------------------
-
-verbose = 1
-
-def main(argv):
-
-    global verbose
-    global notrimlat
-
-    try:
-        args, lastargs = getopt.getopt( \
-            argv[1:], "a:c:d:e:f:hi:jl:m:p:qr:s:t:x:", \
-            ["include=","include-file=","exclude=","exclude-file=","forecast","time-linear=", \
-             "notrim-lat","var-locate=","ignore-open-error" ] )
-    except getopt.error:
-        print >> sys.stderr,  sys.exc_value
-        print >> sys.stderr,  usage
-        sys.exit(0)
-
-    calendar = None
-    calenkey = None
-    timeid = None
-    levelid = None
-    notrimlat = 0
-    referenceTime = None
-    referenceDelta = None
-    readFromFile = 0
-    splitOnTime = 1
-    splitOnLevel = 0
-    datasetid = "none"
-    timeIsLinear = None
-    writeToStdout = 1
-    templatestr = None
-    timeIsVector = None
-    modelMapFile = None
-    aliasMapFile = None
-    overrideCalendar = 0
-    extraAttrs = []
-    extraDict = {}
-    includeList = None
-    excludeList = None
-    overrideTimeLinear = None
-    varLocate = None
-    ignoreOpenError = False
-    excludePattern = None
-    includePattern = None
-    forecast = False
-    for flag, arg in args:
-        if flag=='-a':
-            aliasMapFile = arg
-        elif flag=='-c':
-            calenkey = string.lower(arg)
-            calendar = calendarMap[calenkey]
-            overrideCalendar = 1
-        elif flag=='-d':
-            datasetid = arg
-        elif flag=='-e':
-            matchObj = attrPattern.match(arg)
-            if matchObj is None:
-                raise RuntimeError, "Expression must have form '[var].attr=value': %s"%arg
-            matchGroups = matchObj.groups()
-            if len(matchGroups)!=3:
-                raise RuntimeError, "Expression must have form '[var].attr=value': %s"%arg
-            matchValue = matchGroups[2]
-            if len(matchValue)>0 and (matchValue[0].isdigit() or matchValue[0] in ['"',"'","-","+"]): #"
-                matcheval = eval(matchValue)
-            else:
-                matcheval = str(matchValue)
-            extraAttrs.append((matchGroups[0], matchGroups[1], matcheval))
-        elif flag=='--exclude':
-            if arg[0]=='-':
-                raise RuntimeError, "--exclude option requires an argument"
-            excludeList = string.split(arg,',')
-        elif flag=='--exclude-file':
-            excludePattern = arg
-        elif flag=='-f':
-            readFromFile = 1
-            filelistpath = arg
-        elif flag=='--forecast':  # experimental forecast mode
-            forecast = True
-            splitOnTime = 0
-            splitOnLevel = 0
-        elif flag=='-h':
-            print usage
-            sys.exit(0)
-        elif flag=='-i':
-            splitOnTime = 1
-            referenceDelta = string.atof(arg)
-            timeIsLinear = 1
-            timeIsVector = None
-        elif flag=='--ignore-open-error':
-            ignoreOpenError = True
-        elif flag=='--include':
-            if arg[0]=='-':
-                raise RuntimeError, "--include option requires an argument"
-            includeList = string.split(arg,',')
-        elif flag=='--include-file':
-            includePattern = arg
-        elif flag=='-j':
-            timeIsVector = 1
-            timeIsLinear = None
-        elif flag=='-l':
-            splitOnLevel = 1
-            levelstr = string.split(arg,',')
-            levellist = map(string.atof, levelstr)
-            levels = numpy.array(levellist)
-            levels = numpy.sort(levels)
-        elif flag=='-m':
-            levelid = arg
-            args.append(('-e','%s.axis=Z'%levelid)) # Add axis=Z attribute
-        elif flag=='--notrim-lat':
-            notrimlat = 1
-        elif flag=='-p':
-            templatestr = arg
-        elif flag=='-q':
-            verbose = 0
-        elif flag=='-r':
-            splitOnTime = 1
-            referenceTime = arg
-        elif flag=='-s':
-            modelMapFile = arg
-        elif flag=='-t':
-            splitOnTime = 1
-            timeid = arg
-            args.append(('-e','%s.axis=T'%timeid)) # Add axis=T attribute
-        elif flag=='--time-linear':
-            targlist = string.split(arg,',')
-            ttzero = string.atof(targlist[0])
-            tdelta = string.atof(targlist[1])
-            tunits = string.strip(targlist[2])
-            if len(targlist)==4:
-                tcalendar = string.strip(targlist[3])
-            else:
-                tcalendar = None
-            overrideTimeLinear = [ttzero,tdelta,tunits,tcalendar]
-        elif flag=='--var-locate':
-            if varLocate is None:
-                varLocate = {}
-            vname, pattern = string.split(arg,',')
-            varLocate[vname]=pattern
-        elif flag=='-x':
-            writeToStdout = 0
-            xmlpath = arg
-
-    # If overriding time, process time as vector so that no gaps result
-    if overrideTimeLinear is not None:
-        timeIsVector = 1
-        timeIsLinear = None
-        if overrideCalendar==1:
-            overrideTimeLinear[3]=calenkey
-
-    if verbose:
-        print 'Finding common directory ...'
-    if readFromFile:
-        f = open(filelistpath)
-        lastargs = f.readlines()
-        f.close()
-
-    # Ignore blank paths
-    realargs = []
-    for arg in lastargs:
-        sarg = string.strip(arg)
-        if len(sarg)>0:
-            realargs.append(sarg)
-    lastargs = realargs
-
-    # Split lastargs into files and datasets
-    fileargs = []
-    dsetargs = []
-    for arg in lastargs:
-        base, suffix = os.path.splitext(arg)
-        if string.lower(suffix) in ['.xml','.cdml']:
-            dsetargs.append(arg)
-        else:
-            fileargs.append(arg)
-
-    # Generate a list of pathnames for datasets
-    dsetfiles = []
-    for path in dsetargs:
-        dset = cdms2.open(path)
-        if not hasattr(dset, 'cdms_filemap'):
-            raise RuntimeError,'Dataset must have a cdms_filemap attribute: '+path
-        if not hasattr(dset, 'directory'):
-            raise RuntimeError,'Dataset must have a directory attribute: '+path
-        dsetdirec = dset.directory
-        initfilemap = cdms2.dataset.parseFileMap(dset.cdms_filemap)
-        for namelist, slicelist in initfilemap:
-            for t0, t1, lev0, lev1, path in slicelist:
-                dsetfiles.append(os.path.join(dsetdirec, path))
-    augmentedArgs = fileargs + dsetfiles
-
-    # Find the common directory
-    directory = os.path.commonprefix(augmentedArgs)
-    firstpath = augmentedArgs[0][len(directory):]
-    if not os.path.isfile(os.path.join(directory,firstpath)):
-        dnew = os.path.dirname(directory)
-        if len(dnew)>0 and directory[len(dnew)]=='/':
-            directory = dnew+'/'
-        else:
-            directory = dnew
-    if verbose:
-        print 'Common directory:',directory
-
-    dirlen = len(directory)
-
-    if templatestr is not None:
-        if os.path.isabs(templatestr):
-            templatestr = templatestr[dirlen:]
-        templatere, ignore = cdms2.cdmsobj.templateToRegex(templatestr)
-        template = re.compile(templatere+'$')
-    else:
-        template = None
-
-    axisdict = {}
-    vardict = {}
-    filemap = {}
-    timedict = {}
-    levdict = {}
-    fcdict = {}
-    global_attrs = None
-    fctau0 = None
-
-    if modelMapFile is not None:
-        mfile = open(modelMapFile)
-        modelMap = {}
-        modelDirs = []
-        for line in mfile.readlines():
-            mdirec, model = string.split(line)
-            modelMap[mdirec] = model
-            modelDirs.append(mdirec)
-        mfile.close()
-
-    if aliasMapFile is not None:
-        afile = open(aliasMapFile)
-        aliasMap = {}
-        for line in afile.readlines():
-            if line[0] not in ["'",'"']: #"
-                varid, alias = string.split(line)
-            else:
-                dummy, varid, alias = string.split(line,line[0])
-                alias = string.strip(alias)
-            aliasMap[varid] = alias
-        afile.close()
-
-    # Save extra attribute information for new axes
-    for evar, eattr, evalue in extraAttrs:
-        if evar=='':
-            continue
-        if extraDict.has_key(evar):
-            curval = extraDict[evar]
-            curval.append((eattr,evalue))
-        else:
-            extraDict[evar] = [(eattr,evalue)]
-
-    #---------------------------------------------------------------------------------------------
-    # Initialize dictionaries if adding to an existing dataset
-    if verbose and len(dsetargs)>0:
-        print 'Scanning datasets ...'
-    for extendPath in dsetargs:
-        if verbose:
-            print extendPath
-        extendDset = cdms2.open(extendPath)
-
-        # Add/modify attributes
-        addAttrs(extendDset, extraAttrs)
-
-        # Copy the global attribute dictionary if necessary. Note that copy.copy
-        # can't be used here, since .attributes is now a 'fake' dictionary.
-        if global_attrs is None:
-            global_attrs = copyDict(extendDset.attributes)
-
-        # Initialize filemap : varid => (tc0, tc1, lc0, lc1, path, timeid, levid)
-        # where tc0 is the first time index relative to the reference time, tc1 the last,
-        # lc0 is the first level, lc1 the last, path is the filename, timeid is the id
-        # of the time dimension of the variable, levid is the id of the level dimension
-        # 
-        # timedict : (path, timeid) => (timearray, timeunits, calendar)
-        #
-        # levdict : (path, levelid) => (levelarray, levelunits, None)
-        #
-        initialize_filemap( filemap, timedict, levdict, timeid, extendDset, splitOnTime, \
-                            referenceTime, timeIsLinear, referenceDelta, splitOnLevel, \
-                            dirlen, overrideCalendar )
-
-        # axisdict : id => transient_axis
-        #   for non-partitioned axes only
-        #
-        tempmap = {}
-        for axis in extendDset.axes.values():
-            if not ( (splitOnTime and (axis.isTime() or axis.id==timeid)) or \
-                     (splitOnLevel and (axis.isLevel() or axis.id==levelid)) ):
-                axis = cloneWithLatCheck(axis)
-                if axisdict.has_key(axis.id):
-                    currentaxis = axisdict[axis.id]
-
-                    # Check that the axis has the same length and values as the saved value. If not,
-                    # create an unambiguous name in the axis dictionary.
-                    if compareaxes(axis, currentaxis):
-                        sepname = disambig(axis.id, axisdict, len(axis), compareaxes, axis)
-                        axis.name_in_file = axis.id
-                        oldid = axis.id
-                        axis.id = sepname
-                        axisdict[sepname] = axis
-                        tempmap[oldid] = sepname
-                else:
-                    axisdict[axis.id] = axis
-
-        # vardict : varid => [domain, attributeDict, typecode]
-        #   where domain = [axis_or_id, axis_or_id,...]
-        #   and axis_or_id is the id of a partitioned dimension, or
-        #   the transient axis object associated with a non-partitioned dimension
-        #
-        for var in extendDset.variables.values():
-            tempdomain = []
-            for id in var.getAxisIds():
-                if tempmap.has_key(id):
-                    id = tempmap[id]
-                if axisdict.has_key(id):
-                    tempdomain.append(axisdict[id])
-                else:
-                    axis = extendDset[id]
-                    if hasattr(axis,'name_in_file'):
-                        id = axis.name_in_file
-                    tempdomain.append(id)
-            varattrs = copyDict(var.attributes)
-            vardict[var.id] = [tempdomain, varattrs, var.typecode()]
-
-        extendDset.close()
-
-        # end of loop "for extendPath in dsetargs"
-
-    #---------------------------------------------------------------------------------------------
-    if verbose:
-        print 'Scanning files ...'
-
-    boundsmap = {}                      # boundsmap : varid => timebounds_id
-    boundsdict = {}                     # Same as vardict for time bounds
-    for path in fileargs:
-        path = string.strip(path)
-
-        # Check if the path is included
-        if includePattern is not None:
-            base = os.path.basename(path)
-            mobj = re.match(includePattern, base)
-            if mobj is None:
-                continue
-
-        # Check if the path is excluded
-        if excludePattern is not None:
-            base = os.path.basename(path)
-            mobj = re.match(excludePattern, base)
-            if mobj is not None:
-                continue
-
-        if verbose:
-            print path
-        try:
-            f = cdms2.open(path)
-        except:
-            if not ignoreOpenError:
-                raise RuntimeError,'Error opening file '+path
-            else:
-                print >> sys.stderr,  'Warning: cannot open file, skipping: %s'%path
-                continue
-
-        # Add/modify attributes
-        addAttrs(f, extraAttrs)
-
-        # Determine the variable ID suffix, if any
-        varsuffix = None
-        if modelMapFile is not None:
-            for direc in modelDirs:
-                mo = re.match(direc, path)
-                if mo is not None:
-                    suffixPattern = modelMap[direc]
-                    def gensuffix(m, mo=mo):
-                        i = string.atoi(m.group(1))
-                        return mo.group(i)
-                    varsuffix = re.sub(r'\\g<(\d)>', gensuffix, suffixPattern)
-                    break
-
-        # Copy the global attribute dictionary if necessary. Note that copy.copy
-        # can't be used here, since .attributes is now a 'fake' dictionary.
-        if global_attrs is None:
-            global_attrs = copyDict(f.attributes)
-
-        basepath = path[dirlen:]
-        if template is not None and template.match(basepath) is None:
-            if verbose:
-                print >> sys.stderr,  'Warning: path %s does not match template %s'%(basepath, templatestr)
-
-        # Find time boundary variables
-        boundsids = []
-        if splitOnTime:
-            tmpdict = {}
-            for axisname in f.axes.keys():
-                axis = f[axisname]
-                #was if axis.isTime() and hasattr(axis, 'bounds'):
-                if axis.isTime() and (axis.getBounds() is not None):
-                    tmpdict[axis.bounds] = 1
-            boundsids = tmpdict.keys()
-
-        # For forecasts, get the time at which the forecast begins (tau=0) which
-        # is nbdate,nbsec
-        if forecast:
-            nbdate = numpy.int( f('nbdate') )  # f('nbdate') is numpy.int32 which gets truncated
-            nbsec = f('nbsec')
-            fctau0 = nbdate*100000 + nbsec  # hopefully nbsec<(seconds per day)=86400<100000
-            fctau0time = cdtime.abstime( nbdate,"day as %Y%m%d" )
-            fctau0time = fctau0time.add( nbsec, cdtime.Seconds )  # fctau0 as type comptime
-            fc_time_attrs = []
-
-        varnames = f.variables.keys()
-
-        # Try to force all axes to be included, but only small ones, length<100.
-        # This section was motivated by a need to preserve the cloud axes isccp_prs,isccp_tau.
-        # If we ever need to preserve longer axes as well, we could create one variable per axis...
-        crude_var_axes = [ [ ax[0] for ax in var.getDomain() ] for var in f.variables.values() ]
-        var_axes = set().union( *crude_var_axes )
-        other_axes = list( set(f.axes.values()) - var_axes )
-        if len(other_axes)>0:
-            other_axes = [ax for ax in other_axes if len(ax)<100]
-            other_axes.sort( key=(lambda ax:ax.id) )
-            axisvar = cdms2.createVariable( numpy.ones([len(ax) for ax in other_axes]),
-                                            axes=other_axes, id='allaxesdummy')
-            axisvar.autoApiInfo = None    # all CdmsObj objects have this attribute, but for unknown
-            # reasons datasetnode.dump() fails trying to dump this attribute's default value (jfp)
-            varnames.append( axisvar.id )
-        # ...try to force all axes to be considered
-
-        varnames.sort()
-        for varname in varnames:
-
-            # If --var-locate is specified for the variable, match the basename before processing
-            if varLocate is not None and varLocate.has_key(varname):
-                varpattern = varLocate[varname]
-                base = os.path.basename(path)
-                mobj = re.match(varpattern, base)
-                if mobj is None:
-                    continue
-
-            # was var = f.variables[varname]
-            if varname=='allaxesdummy':
-                var = axisvar
-            else:
-                var = f.variables[varname]
-
-            # Reset the variable ID to any specified alias
-            if aliasMapFile is not None:
-                varalias = aliasMap.get(var.id)
-                if varalias is not None:
-                    var.name_in_file = var.id
-                    var.id = varalias
-                    varname = varalias
-
-            # Append a suffix to the variable ID, if applicable
-            if varsuffix is not None:
-                if not hasattr(var, 'name_in_file'):
-                    var.name_in_file = var.id
-                var.id += varsuffix
-                varname += varsuffix
-
-            varentry = [None]*9         # [timestart, timeend, levstart, levend, path, timeid, levid, calendar, fctau0]
-            varentry[4] = basepath
-            varentry[8] = fctau0
-
-            # Generate a temporary domain entry, and
-            # create axis dictionary entries.
-            domain = var.getDomain()
-            if forecast:
-                tempdomain = ['fctau0']
-            else:
-                tempdomain = []         # List of axis names and/or objects (if not partitioned)
-            for axis, start, length, truelen in domain:
-                if (splitOnTime and (axis.isTime() or axis.id==timeid)) or \
-                   (splitOnLevel and (axis.isLevel() or axis.id==levelid)):
-                    tempdomain.append(axis.id)
-                elif forecast and  (axis.isTime() or axis.id==timeid):
-                    # time axis isn't split but needs special treatment for forecasts
-                    tempdomain.append(axis.id)
-                    fc_time_attrs.append(axis.attributes)
-                else:
-                    axis = cloneWithLatCheck(axis) # Transient copy
-                    if axisdict.has_key(axis.id):
-                        currentaxis = axisdict[axis.id]
-
-                        # Check that the axis has the same length and values as the saved value. If not,
-                        # create an unambiguous name in the axis dictionary.
-                        if compareaxes(axis, currentaxis):
-                            sepname = disambig(axis.id, axisdict, len(axis), compareaxes, axis)
-                            axis.name_in_file = axis.id
-                            axis.id = sepname
-
-                            # Fix boundary variable names if using suffixes.
-                            if varsuffix is not None and hasattr(axis, 'bounds'):
-                                axis.bounds += varsuffix
-                            axisdict[sepname] = axis
-                        else:
-                            axis = currentaxis
-                    else:
-                        # Fix boundary variable names if using suffixes.
-                        if varsuffix is not None and hasattr(axis, 'bounds'):
-                            axis.bounds += varsuffix
-                        axisdict[axis.id] = axis
-                    tempdomain.append(axis)
-
-            # Create a dictionary entry for the variable if not already there.
-            if var.id in boundsids:
-                boundsattrs = copyDict(var.attributes)
-                boundsdict[var.id] = [tempdomain, boundsattrs, var.typecode()]
-                continue                # Don't set a filemap entry until axes are sorted out
-            elif not vardict.has_key(var.id):
-                varattrs = copyDict(var.attributes)
-                if varsuffix is not None or aliasMapFile is not None:
-                    varattrs['name_in_file'] = var.name_in_file
-                vardict[var.id] = [tempdomain, varattrs, var.typecode()]
-            else:
-                currentdomain, attrs, tcode = vardict[var.id]
-                if comparedomains(currentdomain, tempdomain):
-                    sepname = disambig(var.id, vardict, var.size(), compareVarDictValues, (tempdomain, None))
-                    saveid = var.id
-                    varname  = var.id = sepname
-                    varattrs = copyDict(var.attributes)
-                    var.name_in_file = varattrs['name_in_file']  = saveid
-                    vardict[sepname] = [tempdomain, varattrs, var.typecode()]
-
-            # Create a filemap entry for this variable/file, if split on time or forecast
-            axisids = map(lambda x: x[0].id, var.getDomain())
-            if splitOnTime or forecast:
-                vartime = None
-                if timeid is not None:
-                    if timeid in axisids:
-                        vartime = f.axes.get(timeid)
-                    else:
-                        if verbose:
-                            print >> sys.stderr,  'Warning, time axis %s not found, -t option ignored'%timeid
-                if vartime is None:
-                    vartime = var.getTime()
-                if vartime is not None:
-                    if not overrideCalendar:
-                        calendar = vartime.getCalendar()
-                    if referenceTime is None:
-                        referenceTime = vartime.units
-                    if verbose and not forecast:
-                        print 'Setting reference time units to', referenceTime
-                    if timeIsLinear is None and timeIsVector is None:
-                        timeIsLinear = (string.lower(string.split(referenceTime)[0]) in ['hour','hours','minute','minutes','second','seconds'])
-                        if timeIsLinear and verbose:
-                            print 'Setting time representation to "linear"' #'
-                    if timeIsLinear and referenceDelta is None:
-                        if len(vartime)>1:
-                            time1 = timeindex(vartime[1], vartime.units, referenceTime, None, calendar)
-                            time0 = timeindex(vartime[0], vartime.units, referenceTime, None, calendar)
-                            referenceDelta = time1 - time0
-                        else:
-                            referenceDelta = 1
-                        if verbose:
-                            print 'Setting time delta to', referenceDelta
-
-#                    starttime = vartime[0]
-#                    endtime = vartime[-1]
-                    startindex = timeindex(vartime[0], vartime.units, referenceTime, referenceDelta, calendar)
-                    endindex = timeindex(vartime[-1], vartime.units, referenceTime, referenceDelta, calendar)
-                    if forecast:
-                        # split on forecast, hence no split on time 
-                        varentry[0] = None
-                        varentry[1] = None
-                        referenceTime = None
-                    else:
-                        varentry[0] = startindex
-                        varentry[1] = endindex
-                    varentry[5] = vartime.id
-                    varentry[7] = calendar
-
-                    if not timedict.has_key((basepath,vartime.id)):
-                        values = vartime[:]
-                        timedict[(basepath,vartime.id)] = (values, vartime.units, calendar)
-
-            if splitOnLevel:
-                varlev = None
-                if (levelid is not None) and (levelid in axisids):
-                    varlev = f.axes.get(levelid)
-                if varlev is None:
-                    varlev = var.getLevel()
-                if varlev is not None:
-                    startlev = varlev[0]
-                    if type(startlev) is numpy.ndarray:
-                        startlev = startlev[0]
-                    endlev = varlev[-1]
-                    if type(endlev) is numpy.ndarray:
-                        endlev = endlev[0]
-                    varentry[2] = startlev
-                    varentry[3] = endlev
-                    varentry[6] = varlev.id
-
-                    if not levdict.has_key((basepath, varlev.id, None)):
-                        values = varlev[:]
-                        levdict[(basepath,varlev.id)] = (values, varlev.units, None)
-
-            if forecast:
-                if not fcdict.has_key((basepath, 'fctau0')):
-                    fcdict[(basepath, 'fctau0')] = ( [fctau0], None, None )
-
-            if filemap.has_key(varname):
-                filemap[varname].append(tuple(varentry))
-            else:
-                filemap[varname] = [tuple(varentry)]
-
-            # Set boundsmap : varid => timebounds_id
-            #was if splitOnTime and vartime is not None and hasattr(vartime, "bounds") and not boundsmap.has_key(varname):
-            if splitOnTime and vartime is not None and (vartime.getBounds() is not None) and\
-                    not boundsmap.has_key(varname):
-                boundsmap[varname] = vartime.bounds
-
-            # End of loop "for varname in varnames"
-
-        f.close()
-        # End of loop "for path in fileargs"
-
-    #---------------------------------------------------------------------------------------------
-
-    # Generate varindex, by combining variable names with
-    # identical varentry values.
-    varindex = []
-    varnames = filemap.keys()
-    varnames.sort()
-    for varname in varnames:
-        varentry = filemap[varname]
-        varentry.sort()
-
-        for varindexname, varindexvalue in varindex:
-            if varentry == varindexvalue:
-                varindexname.append(varname)
-                break
-        else:
-            varindex.append(([varname],varentry))
-
-    # If a variable is not a function of one of the partitioned dimensions,
-    # no indexing is necessary: just read from the first file containing it.
-    for varlist, slicelist in varindex:
-        slice0 = slicelist[0]
-        a,b,c,d,path0,timename,levname,calen,fctau0 = slice0
-        if (a,b,c,d,fctau0)==(None,None,None,None,None):
-            del slicelist[1:]
-
-    # Change times to constant units
-    sameCalendars = 1                   # True iff all time calendars are the same
-    prevcal = None
-    if forecast:
-        # The data files' time axis is interpreted to be tau time, i.e. the forecast_period.
-        # Find the axis, and remember it in timedict.
-        for key in timedict.keys():
-            values, units, calendar = timedict[key]
-            if prevcal is not None and calendar != prevcal:
-                sameCalendars = 0
-            prevcal = calendar
-            if string.find(units," as ")==-1:
-                time0 = cdtime.reltime(values[0],units)
-            else:
-                time0 = cdtime.abstime(values[0],units)
-            offset = time0.torel( units, calendar ).value  # normally will be 0
-            values = values+offset-values[0]
-            # Switch units from "normal" time such as "days since 2001-06-01"
-            # to "basic" time such as "days", which makes sense for a forecast_period.
-            baslen = time0.units.find(' since ')
-            basic_units = time0.units[0:baslen]  # e.g. 'days'
-            fc_units = basic_units
-            timedict[key] = (values, fc_units, calendar)
-    else:       # splitOnTime is true
-        for key in timedict.keys():
-            values, units, calendar = timedict[key]
-            if prevcal is not None and calendar != prevcal:
-                sameCalendars = 0
-            prevcal = calendar
-            if string.find(units," as ")==-1:
-                time0 = cdtime.reltime(values[0],units)
-            else:
-                time0 = cdtime.abstime(values[0],units)
-            offset = time0.torel(referenceTime, calendar).value
-            values = values+offset-values[0]
-            timedict[key] = (values, referenceTime, calendar)
-
-    if sameCalendars and prevcal is not None:
-        calenkey = reverseCalendarMap[calendar]
-        
-    if forecast:
-        # For forecasts, make sure that the above has made all timedict values the same.
-        # >>> It's conceivable that different forecasts will have different time (really, tau)
-        # >>> axes.  If so, at this point we'll want to merge and mask all the time values, so
-        # >>> that all variables can have the same time axis..  For now, just raise an error
-        # >>> if there are time axis differences at this point.
-        values0,units0,calendar0 = timedict[ timedict.keys()[0] ]
-        timedict_same = all( [ ((values0==values).all() and units0==units and calendar0==calendar) \
-                               for (values,units,calendar) in timedict.values() ] )
-        if not timedict_same:
-            raise CDMSError, 'cdscan is confused about times for a forecast set'
-        # Earlier we had saved all the time axis attributes.  Keep whatever they have in common.
-        fc_time_attr = fc_time_attrs[0]
-        for fcta in fc_time_attrs:             # go through all time attributes (each a dictionary)
-            for attrn in fc_time_attr.keys():
-                if not fcta.has_key(attrn):
-                    del fc_time_attr[attrn]    # key attrn isn't in all time attributes
-                elif fcta[attrn]!=fc_time_attr[attrn]:
-                    del fc_time_attr[attrn]    # not all time attributes have the same value for attrn
-        # At this point fc_time_attr is the dictionary of those time attributes which are common to
-        # all time axes encountered (in the context of a forecast dataset).
-        # Finally, add the appropriate standard_name to it, if we haven't already gotten one from
-        # the data file.  If the file has anything other than 'forecast_period', it's wrong, but
-        # we'll stick with it anyway.
-        if not 'standard_name' in fc_time_attr.keys():
-            fc_time_attr['standard_name'] = 'forecast_period'
-        
-    # Create partitioned axes
-    axes = []
-    masterCoordToInd = {}               # varkey => (timeCoordToInd, levCoordToInd)
-    errorOccurred = 0
-    for varlist, varentry in varindex:
-
-        # Project time, level indices
-        timeproj = {}
-        levproj = {}
-        fctproj = {}
-        for time0, time1, lev0, lev1, path, timename, levname, calendar, fctau0 in varentry:
-            if timename is not None:
-                timeproj[(time0, time1)] = (path, timename)
-            if levname is not None:
-                try:
-                    levproj[(lev0, lev1)] = (path, levname)
-                except:
-                    print >> sys.stderr,  'Cannot hash level %s range (%f,%f)'%(levname,lev0,lev1)
-                    print >> sys.stderr,  type(lev0)
-                    raise
-            if fctau0 is not None:
-                fctproj[(fctau0,fctau0)] = (path, 'fctau0')
-
-        # and combine the projected indices into axes
-        timeCoordToInd = None
-        timelinCoordToInd = None
-        if splitOnTime and timename is not None:
-            fullaxis, name, partition, timeCoordToInd, units, opartition, timelinCoordToInd, errflag = \
-                      combineKeys(timeproj, timedict, timeIsLinear, referenceDelta)
-            axes.append( ( varlist,fullaxis,name,partition,timeCoordToInd,units,opartition, \
-                           timelinCoordToInd, calendar ) )
-            if errflag: errorOccurred = 1
-        levCoordToInd = None
-        if splitOnLevel and levname is not None:
-            fullaxis, name, partition, levCoordToInd, units, opartition, levlinCoordToInd, errflag = \
-                      combineKeys(levproj, levdict)
-            axes.append((varlist,fullaxis,name,partition,levCoordToInd,units,opartition,levlinCoordToInd, None))
-            if errflag: errorOccurred = 1
-        fcCoordToInd = None
-        if forecast:
-            fullaxis, name, partition, fcCoordToInd, units, opartition, fclinCoordToInd, errflag = \
-                      combineKeys(fctproj, fcdict, forecast=forecast)
-            axes.append((varlist,fullaxis,name,partition,fcCoordToInd,units,opartition,fclinCoordToInd, None))
-            if errflag: errorOccurred = 1
-            if len(timeproj)>0:     # i.e., if time is in this variable's domain.
-                # The useKeys call is like combineKeys, except that it's for a variable not partitioned
-                # among files.  It just sets up axis data and (in the context of this variable loop)
-                # propagates what's in timedict to every variable with time in its domain.
-                fullaxis, name, partition, timeCoordToInd, units, opartition, timelinCoordToInd, errflag = \
-                          useKeys(timeproj, timedict, timeIsLinear, referenceDelta)
-                axes.append( (varlist,fullaxis,name,partition,timeCoordToInd,units,opartition, \
-                              timelinCoordToInd, calendar) )
-                if errflag: errorOccurred = 1
-            
-
-        masterCoordToInd[varlist[0]] = (timeCoordToInd, levCoordToInd, timelinCoordToInd, fcCoordToInd)
-
-    if errorOccurred:
-        raise RuntimeError, 'Error(s) determining axis values - see previous message(s)'
-    
-    # Eliminate duplicate axes
-    axes2 = []
-    for vlist1, axis1, name1, partition1, coordToInd1, units1, opartition1, linCoordToInd1, calen1 in axes:
-        for vlist2, axis2, name2, partition2, coordToInd2, units2, opartition2, linCoordToInd2, calen2 in axes2:
-            if len(axis1)==len(axis2) and name1==name2 and partition1==partition2 and units1==units2 and \
-                   numpy.ma.allclose(axis1,axis2)==1 and calen1==calen2:
-                vlist2.extend(vlist1)
-                break
-        else:
-            axes2.append((copy.copy(vlist1),axis1, name1, partition1, coordToInd1, units1, opartition1, \
-                          linCoordToInd1, calen1))
-
-    # For each axis described by axis2, disambiguate its name, create the axis object, etc.
-    assignedBounds = {}
-    for vlist, axis, name, partition, coordToInd, units, opartition, linCoordToInd, calendar in axes2:
-        # print vlist, coordToInd
-        uniqname = disambig(name, axisdict, len(axis), compareaxes, axis)
-        axisobj = cdms2.createAxis(axis)
-        axisobj.name_in_file = name
-        axisobj.id = uniqname
-        axisobj.units = units
-        if forecast and axisobj.isTime():   # For forecasts, give the time axis some saved attributes.
-            for attr in fc_time_attr.keys():
-                if not hasattr(axisobj,attr):
-                    setattr(axisobj,attr,fc_time_attr[attr])
-        if timeIsLinear and axisobj.isTime():
-            axisobj.partition = numpy.ma.ravel(numpy.ma.array(opartition))
-            axisobj.length = axisobj.partition[-1]-axisobj.partition[0]
-            mopartition = numpy.array(opartition)
-            partition_length = numpy.sum(mopartition[:,1]-mopartition[:,0])
-            if partition_length<axisobj.length:
-                axisobj.partition_length = partition_length
-        elif partition is not None:
-            axisobj.partition = numpy.ma.ravel(numpy.ma.array(partition))
-        if axisobj.isTime():
-            axisobj.calendar = reverseCalendarMap[calendar]
-        # axisobj.reference_partition = str(numpy.ma.ravel(numpy.ma.array(opartition)))
-        if not axisdict.has_key(uniqname):
-            axisdict[uniqname] = axisobj
-        for varname in vlist:
-            domain, attributes, tcode = vardict[varname]
-            for i in range(len(domain)):
-                item = domain[i]
-                if type(item)==types.StringType and item==name:
-                    domain[i] = axisobj
-
-        # Add bounds variables to vardict, varindex
-        if axisobj.isTime():
-            reprVar = vlist[0]              # 'Representative' variable having this time axis
-            if boundsmap.has_key(reprVar):
-                boundsname = boundsmap[reprVar]
-                boundsinfo = boundsdict[boundsname]
-                boundsattrs = boundsinfo[1]
-                if uniqname!=name:
-                    boundsattrs['name_in_file'] = boundsname
-                    boundsname = uniqname+'_bnds'
-                if not assignedBounds.has_key(boundsname):
-                    axisobj.bounds = boundsname
-                    for varids, ranges in varindex:
-                        if reprVar in varids:
-                            varids.append(boundsname)
-                    tmpdom = boundsinfo[0]
-                    if type(tmpdom[1])==types.StringType:
-                        bndsobj = tmpdom[0]
-                        boundsdomain = (bndsobj, axisobj)
-                    else:
-                        bndsobj = tmpdom[1]
-                        boundsdomain = (axisobj, bndsobj)
-                    vardict[boundsname] = (boundsdomain, boundsinfo[1], boundsinfo[2])
-                    assignedBounds[boundsname] = 1
-
-    # Collapse like indices in filemap. For example, transform
-    # [x,[[0,10,-,-,file1], [0,10,-,-,file2]]] into
-    # [x,[[0,10,-,-,file1]]]
-    # This occurs for variables such as time boundaries, which are
-    # often duplicated in different files.
-    cdms_filemap_list = []
-    duplicatevars = {}
-    for varindexname, varindexvalue in varindex:
-        timeCoordToInd, levCoordToInd, linCoordToInd, fcCoordToInd = masterCoordToInd[varindexname[0]]
-        newslicedict = {}
-        for time0, time1, lev0, lev1, path, timename, levname, calendar, fctau0 in varindexvalue:
-            if timeCoordToInd is not None:
-                if timeIsLinear:
-                    i0, i1 = linCoordToInd[(time0, time1)]
-                else:
-                    i0, i1 = timeCoordToInd[(time0, time1)]
-            else:
-                i0 = i1 = None
-            if levCoordToInd is not None:
-                j0, j1 = levCoordToInd[(lev0, lev1)]
-            else:
-                j0 = j1 = None
-            if newslicedict.has_key((i0,i1,j0,j1,fctau0)):
-                currentpath = newslicedict[(i0,i1,j0,j1,fctau0)]
-                if not duplicatevars.has_key(tuple(varindexname)):
-                    duplicatevars[tuple(varindexname)] = (currentpath, path)
-            else:
-                newslicedict[(i0,i1,j0,j1,fctau0)] = path
-        keys = newslicedict.keys()
-        keys.sort()
-        newslicelist = []
-        for i0,i1,j0,j1,fctau0 in keys:
-            path = newslicedict[(i0,i1,j0,j1,fctau0)]
-            newslicelist.append([i0, i1, j0, j1, fctau0, path])
-        cdms_filemap_list.append([varindexname, newslicelist])
-
-    # Check if any duplicated variables are a function of longitude or latitude.
-    # Raise an exception if so.
-    illegalvars = []
-    for varlist in duplicatevars.keys():
-        for varname in varlist:
-            if (excludeList is not None) and (varname in excludeList):
-                continue
-            dom, attrs, tcode = vardict[varname]
-            for axisobj in dom:
-                if axisobj.isLatitude() or axisobj.isLongitude():
-                    path1, path2 = duplicatevars[varlist]
-                    illegalvars.append((varname, path1, path2))
-    if len(illegalvars)>0:
-        raise RuntimeError, "Variable '%s' is duplicated, and is a function of lat or lon: files %s, %s"%illegalvars[0]
-        
-    if verbose and len(duplicatevars.values())>0:
-        print >> sys.stderr,  'Duplicate variables:'
-        for varlist in duplicatevars.keys():
-            path1, path2 = duplicatevars[varlist]
-            print >> sys.stderr,  '\t',varlist,'\t',path1,'\t',path2
-
-    # Generate the cdms_filemap attribute
-    cdms_filemap = str(cdms_filemap_list)
-    cdms_filemap = string.replace(cdms_filemap, ' ', '')
-    cdms_filemap = string.replace(cdms_filemap, 'None', '-')
-    cdms_filemap = string.replace(cdms_filemap, '"', '') #"
-    cdms_filemap = string.replace(cdms_filemap, "'", '')
-
-    # Dump to XML
-    datasetnode = cdmsNode.DatasetNode(datasetid)
-    global_attrs['cdms_filemap'] = cdms_filemap
-    global_attrs['directory'] = directory
-    if sameCalendars and calenkey is not None:
-        global_attrs['calendar'] = calenkey
-    elif global_attrs.has_key('calendar'):
-        del global_attrs['calendar']
-    cleanupAttrs(global_attrs)
-    # datasetnode.setExternalDict(global_attrs)
-    setNodeDict(datasetnode, global_attrs)
-    validateAttrs(datasetnode)
-
-    timeWasOverridden = 0
-    keys = axisdict.keys()
-    keys.sort()
-    for key in keys:
-        axis = axisdict[key]
-        tcode = axis.typecode()
-        if tcode in [numpy.float32, numpy.float, numpy.int16, numpy.int32, numpy.int, numpy.intc, numpy.int8]:
-            tcode = numpy.sctype2char(tcode)
-        cdtype = cdmsNode.NumericToCdType[tcode]
-        node = cdmsNode.AxisNode(axis.id, len(axis), cdtype)
-
-        # Override the time axis as a linear axis
-        if axis.isTime() and (overrideTimeLinear is not None):
-            ttzero = overrideTimeLinear[0]
-            ttdelta = overrideTimeLinear[1]
-            axis.units = overrideTimeLinear[2]
-            if overrideTimeLinear[3] is None:
-                axis.calendar = reverseCalendarMap[axis.getCalendar()]
-            else:
-                axis.calendar = overrideTimeLinear[3]
-            linearnode = cdmsNode.LinearDataNode(ttzero, ttdelta, len(axis))
-            node.setLinearData(linearnode)
-            if verbose:
-                if timeWasOverridden==0:
-                    print "Overriding values for axis '%s'"%axis.id
-                else:
-                    print >> sys.stderr,  'Warning, overriding more than one time axis (%s)'%axis.id
-            timeWasOverridden = 1
-
-        # Represent time as linear axis using time values in the file
-        elif axis.isTime() and timeIsLinear:
-            reference_length = axis.partition[-1]-axis.partition[0]
-            linearnode = cdmsNode.LinearDataNode(axis[0], referenceDelta, reference_length)
-            node.setLinearData(linearnode)
-        else:
-            try:
-                node.setData(axis[:])
-            except cdms2.cdmsNode.NotMonotonicError:
-                if verbose:
-                    print >> sys.stderr,  'Warning: Axis values for axis %s are not monotonic:'%axis.id,axis[:]
-                    print >> sys.stderr,  'Warning: Resetting axis %s values to:'%axis.id, numpy.arange(len(axis))
-                node.setData(numpy.arange(len(axis)))
-        axisattrs = copyDict(axis.attributes)
-
-        # Make sure that new axes have attribute mods
-        if extraDict.has_key(key):
-            for eattr, evalue in extraDict[key]:
-                axisattrs[eattr] = evalue
-        cleanupAttrs(axisattrs)
-        # node.setExternalDict(axisattrs)
-        setNodeDict(node, axisattrs)
-        validateAttrs(node)
-        datasetnode.addId(axis.id, node)
-
-    keys = vardict.keys()
-    keys.sort()
-    for key in keys:
-        if (includeList is not None) and (key not in includeList):
-            continue
-        if (excludeList is not None) and (key in excludeList):
-            continue
-        domain, attrs, tcode = vardict[key]
-        if tcode in [numpy.float32, numpy.float, numpy.int16, numpy.int32, numpy.int, numpy.intc, numpy.int8]:
-            tcode = numpy.sctype2char(tcode)
-        domainNode = cdmsNode.DomainNode()
-        cdtype = cdmsNode.NumericToCdType[tcode]
-        node = cdmsNode.VariableNode(key, cdtype, domainNode)
-        cleanupAttrs(attrs)
-        # node.setExternalDict(attrs)
-        setNodeDict(node, attrs)
-        validateAttrs(node)
-        for axis in domain:
-            if hasattr(axis,'length'):
-                length = axis.length
-            else:
-                length = len(axis)
-            try:
-                elemnode = cdmsNode.DomElemNode(axis.id, 0, length)
-            except AttributeError:
-                print >> sys.stderr,  'Axis %s for variable %s does not have attribute "id"'%(`axis`, key)
-            if hasattr(axis, 'partition_length'):
-                elemnode.setExternalAttr('partition_length',axis.partition_length)
-            domainNode.add(elemnode)
-        datasetnode.addId(key, node)
-
-    # Add the Conventions attribute if not present
-    conventions = datasetnode.getExternalAttr('Conventions')
-    if conventions is None: datasetnode.setExternalAttr('Conventions','')
-    if templatestr is not None:
-        datasetnode.setExternalAttr('template',templatestr)
-
-    # Add/modify history
-    history = datasetnode.getExternalAttr('history')
-    if history is None:
-        history = ""
-    stringargv = reduce(lambda x,y: x+' '+y, argv)
-    stringtime = "\n[%s] "%timestamp()
-    if len(stringargv)<=256:
-        history += stringtime+stringargv
-    else:
-        history += stringtime+stringargv[:256]+" ..."
-    datasetnode.setExternalAttr('history',history)
-
-    ## datasetnode.validate()
-    if writeToStdout:
-        datasetnode.dump()
-    else:
-        datasetnode.dump(xmlpath)
-        if verbose:
-            print xmlpath,'written'
-
-#--------------------------------------------------------------------------------------------------------------------------
-if __name__ == '__main__':
-    main(sys.argv)
-    try:
-        from mpi4py import MPI
-        comm = MPI.Comm.Get_parent()
-        comm.send('done', dest=0)
-    except:
-        pass
-        
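
For reference while reviewing the deletion above: the removed main() built an in-memory filemap keyed by variable id and then serialized it into the dataset's cdms_filemap attribute. A minimal sketch of those shapes, with hypothetical variable names and values (the tuple layouts follow the comments in the deleted code; real entries come from scanning files with cdms2):

    # Hedged sketch of the structures the removed cdscan main() built.
    # filemap : varid => [(tc0, tc1, lc0, lc1, path, timeid, levid, calendar, fctau0), ...]
    filemap = {
        'tas': [(0, 11, None, None, 'tas_2000.nc', 'time', None, 'gregorian', None),
                (12, 23, None, None, 'tas_2001.nc', 'time', None, 'gregorian', None)],
    }

    # timedict : (path, timeid) => (timearray, timeunits, calendar)
    timedict = {
        ('tas_2000.nc', 'time'): ([0., 31., 59.], 'days since 2000-1-1', 'gregorian'),
    }

    # The cdms_filemap attribute is the str() of the collapsed list of
    # [varnames, [i0, i1, j0, j1, fctau0, path]] entries, with blanks and
    # quotes stripped and None shortened to '-':
    cdms_filemap = str([[['tas'], [[0, 11, None, None, None, 'tas_2000.nc']]]])
    for old, new in ((' ', ''), ('None', '-'), ('"', ''), ("'", '')):
        cdms_filemap = cdms_filemap.replace(old, new)
    print(cdms_filemap)  # [[[tas],[[0,11,-,-,-,tas_2000.nc]]]]
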
diff --git a/Packages/cdms2/Script/cdscan b/Packages/cdms2/Script/cdscan
new file mode 120000
index 0000000000000000000000000000000000000000..11d1e0bf94069e9ee435058dd2b6ba8e9fdb1187
--- /dev/null
+++ b/Packages/cdms2/Script/cdscan
@@ -0,0 +1 @@
+../Lib/cdscan.py
\ No newline at end of file
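
The standalone script above is replaced by a symlink into the library: mode 120000 marks a symlink blob whose content is the link target, so cdscan now resolves to ../Lib/cdscan.py. A quick, hedged way to confirm this in a checkout (the path is an assumption about where the repository is checked out):

    import os

    link = 'Packages/cdms2/Script/cdscan'  # relative to the repository root
    print(os.path.islink(link))            # True
    print(os.readlink(link))               # ../Lib/cdscan.py
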
diff --git a/Packages/cdutil/Lib/times.py b/Packages/cdutil/Lib/times.py
index 4b4c9de7cb7cdd2fe2a5e3fc9c0f562be369833e..fdbc9ba8e258dcae9834f4de0915f4ac6d33bbcb 100644
--- a/Packages/cdutil/Lib/times.py
+++ b/Packages/cdutil/Lib/times.py
@@ -118,7 +118,7 @@ def getMonthIndex(my_str):
        # end of for mon in mon_list:
            
    yr = 'JFMAMJJASOND'
-   yrs = yr+yr[:6]
+   yrs = yr+yr
    #
    result = string.find(yrs, my_str)
    if result == -1: return []
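
The one-line change above matters for wraparound seasons: with yrs = yr + yr[:6], a custom season that starts late in the calendar year and runs past June of the following year can never be located, because the doubled-and-truncated string is too short. A minimal sketch (plain str.find standing in for the string.find call in getMonthIndex, with a hypothetical helper name):

    yr = 'JFMAMJJASOND'

    def month_index(season, wrapped):
        # mirrors getMonthIndex's lookup step: find the season in the
        # wrapped month string and return the matched month positions
        i = wrapped.find(season)
        return [] if i == -1 else list(range(i, i + len(season)))

    season = 'NDJFMAMJJASO'                  # 12-month span starting in November
    print(month_index(season, yr + yr[:6]))  # []  -- not found before the fix
    print(month_index(season, yr + yr))      # [10, 11, ..., 21] after the fix
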
diff --git a/Packages/cdutil/Lib/vertical.py b/Packages/cdutil/Lib/vertical.py
index 9d6df0ae8f5e71e9f08cbcb510466a0de7dbe836..167b7d4cdef55f3fb24d2bb326a4b3cb58983891 100644
--- a/Packages/cdutil/Lib/vertical.py
+++ b/Packages/cdutil/Lib/vertical.py
@@ -4,17 +4,19 @@ import genutil
 import cdms2
 import numpy
 import cdat_info
-def reconstructPressureFromHybrid(ps,A,B,Po):
+
+
+def reconstructPressureFromHybrid(ps, A, B, Po):
     """
     Reconstruct the Pressure field on sigma levels, from the surface pressure
-    
+
     Input
     Ps   : Surface pressure
     A,B,Po: Hybrid Conversion Coefficients, such that: p=B*ps+A*Po
     Ps: surface pressure
     B,A are 1D : sigma levels
     Po and Ps must have same units
-    
+
     Output
     Pressure field
     Such as P=B*Ps+A*Po
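
The reconstruction itself is one broadcasted expression per sigma level. A minimal numpy sketch of the arithmetic the docstring describes (not the cdutil implementation, which uses genutil.grower to align the shapes; sizes here are hypothetical, 2 sigma levels over a 3x4 grid):

    import numpy

    ps = numpy.full((3, 4), 101325.0)  # surface pressure, Pa
    A = numpy.array([0.10, 0.05])      # hybrid coefficients, one per level
    B = numpy.array([0.80, 0.90])
    Po = 100000.0                      # reference pressure, Pa

    # p = B*ps + A*Po, with the level axis prepended to the horizontal grid
    p = B[:, None, None] * ps + A[:, None, None] * Po
    print(p.shape)     # (2, 3, 4): level, lat, lon
    print(p[0, 0, 0])  # 0.8*101325 + 0.1*100000 = 91060.0
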
@@ -23,200 +25,234 @@ def reconstructPressureFromHybrid(ps,A,B,Po):
     P=reconstructPressureFromHybrid(ps,A,B,Po)
     """
     # Compute the pressure for the sigma levels
-    cdat_info.pingPCMDIdb("cdat","cdutil.vertical.reconstructPressureFromHybrid")
-    ps,B=genutil.grower(ps,B)
-    ps,A=genutil.grower(ps,A)
-    p=ps*B
-    p=p+A*Po
+    cdat_info.pingPCMDIdb(
+        "cdat",
+        "cdutil.vertical.reconstructPressureFromHybrid")
+    ps, B = genutil.grower(ps, B)
+    ps, A = genutil.grower(ps, A)
+    p = ps * B
+    p = p + A * Po
     p.setAxisList(ps.getAxisList())
-    p.id='P'
+    p.id = 'P'
     try:
-      p.units=ps.units
+        p.units = ps.units
     except:
-      pass
-    t=p.getTime()
+        pass
+    t = p.getTime()
     if not t is None:
-      p=p(order='tz...')
+        p = p(order='tz...')
     else:
-     p=p(order='z...')
+        p = p(order='z...')
     return p
-    
-def linearInterpolation(A,I,levels=[100000, 92500, 85000, 70000, 60000, 50000, 40000, 30000, 25000, 20000, 15000, 10000, 7000, 5000, 3000, 2000, 1000], status=None):
+
+
+def linearInterpolation(
+    A, I, levels=[100000, 92500, 85000, 70000, 60000, 50000, 40000,
+                  30000, 25000, 20000, 15000, 10000, 7000, 5000, 3000, 2000, 1000], status=None, axis='z'):
     """
     Linear interpolation
     to interpolate a field from some levels to another set of levels
     Value below "surface" are masked
-    
+
     Input
     A :      array to interpolate
     I :      interpolation field (usually Pressure or depth) from TOP (level 0) to BOTTOM (last level), i.e. P values increase with each level
     levels : levels to interpolate to (same units as I), default levels are:[100000, 92500, 85000, 70000, 60000, 50000, 40000, 30000, 25000, 20000, 15000, 10000, 7000, 5000, 3000, 2000, 1000]
+    axis:    axis over which to do the linear interpolation, default is 'z'; also accepted: an axis index such as '1' or an axis name such as '(myaxis)'
 
     I and levels must have same units
 
     Output
     array on new levels (levels)
-    
+
     Examples:
     A=linearInterpolation(A,I,levels=[100000, 92500, 85000, 70000, 60000, 50000, 40000, 30000, 25000, 20000, 15000, 10000, 7000, 5000, 3000, 2000, 1000])
     """
-    
-    cdat_info.pingPCMDIdb("cdat","cdutil.vertical.linearInterpolation")
+
+    cdat_info.pingPCMDIdb("cdat", "cdutil.vertical.linearInterpolation")
     try:
-        nlev=len(levels)  # Number of pressure levels
+        nlev = len(levels)  # Number of pressure levels
     except:
-        nlev=1  # if only one level len(levels) would breaks
-        levels=[levels,]
-    order=A.getOrder()
-    A=A(order='z...')
-    I=I(order='z...')
-    sh=list(I.shape)
-    nsigma=sh[0] #number of sigma levels
-    sh[0]=nlev
-    t=MV2.zeros(sh,typecode=MV2.float32)
-    sh2=I[0].shape
-    prev=-1
-    for ilev in range(nlev): # loop through pressure levels
+        nlev = 1  # if only one level, len(levels) would break
+        levels = [levels, ]
+    order = A.getOrder()
+    A = A(order='%s...' % axis)
+    I = I(order='%s...' % axis)
+    sh = list(I.shape)
+    nsigma = sh[0]  # number of sigma levels
+    sh[0] = nlev
+    t = MV2.zeros(sh, typecode=MV2.float32)
+    sh2 = I[0].shape
+    prev = -1
+    for ilev in range(nlev):  # loop through pressure levels
         if status is not None:
-            prev=genutil.statusbar(ilev,nlev-1.,prev)
-        lev=levels[ilev] # get value for the level
-        Iabv=MV2.ones(sh2,MV2.float)
-        Aabv=-1*Iabv # Array on sigma level Above
-        Abel=-1*Iabv # Array on sigma level Below
-        Ibel=-1*Iabv # Pressure on sigma level Below
-        Iabv=-1*Iabv # Pressure on sigma level Above
-        Ieq=MV2.masked_equal(Iabv,-1) # Area where Pressure == levels
-        for i in range(1,nsigma): # loop from second sigma level to last one
-            a = MV2.greater_equal(I[i],  lev) # Where is the pressure greater than lev
-            b =    MV2.less_equal(I[i-1],lev) # Where is the pressure less than lev
+            prev = genutil.statusbar(ilev, nlev - 1., prev)
+        lev = levels[ilev]  # get value for the level
+        Iabv = MV2.ones(sh2, MV2.float)
+        Aabv = -1 * Iabv  # Array on sigma level Above
+        Abel = -1 * Iabv  # Array on sigma level Below
+        Ibel = -1 * Iabv  # Pressure on sigma level Below
+        Iabv = -1 * Iabv  # Pressure on sigma level Above
+        Ieq = MV2.masked_equal(Iabv, -1)  # Area where Pressure == levels
+        for i in range(1, nsigma):  # loop from second sigma level to last one
+            a = MV2.greater_equal(
+                I[i],
+                lev)  # Where is the pressure greater than lev
+            b = MV2.less_equal(
+                I[i - 1],
+                lev)  # Where is the pressure less than lev
             # Now check whether the pressure level is in between the 2 sigma levels
             # If yes, sets Iabv, Ibel and Aabv, Abel
-            a=MV2.logical_and(a,b)
-            Iabv=MV2.where(a,I[i],Iabv) # Pressure on sigma level Above
-            Aabv=MV2.where(a,A[i],Aabv) # Array on sigma level Above
-            Ibel=MV2.where(a,I[i-1],Ibel) # Pressure on sigma level Below
-            Abel=MV2.where(a,A[i-1],Abel) # Array on sigma level Below
-            Ieq= MV2.where(MV2.equal(I[i],lev),A[i],Ieq)
-
-        val=MV2.masked_where(MV2.equal(Ibel,-1.),numpy.ones(Ibel.shape)*lev) # set to missing value if no data below lev if there is
-        
-        tl=(val-Ibel)/(Iabv-Ibel)*(Aabv-Abel)+Abel # Interpolation
+            a = MV2.logical_and(a, b)
+            Iabv = MV2.where(a, I[i], Iabv)  # Pressure on sigma level Above
+            Aabv = MV2.where(a, A[i], Aabv)  # Array on sigma level Above
+            Ibel = MV2.where(
+                a,
+                I[i - 1],
+                Ibel)  # Pressure on sigma level Below
+            Abel = MV2.where(a, A[i - 1], Abel)  # Array on sigma level Below
+            Ieq = MV2.where(MV2.equal(I[i], lev), A[i], Ieq)
+
+        val = MV2.masked_where(
+            MV2.equal(Ibel, -1.), numpy.ones(Ibel.shape) * lev)
+                               # set to missing value if there is
+                               # no data below lev
+
+        tl = (val - Ibel) / (Iabv - Ibel) * \
+            (Aabv - Abel) + Abel  # Interpolation
         if ((Ieq.mask is None) or (Ieq.mask is MV2.nomask)):
-            tl=Ieq
+            tl = Ieq
         else:
-            tl=MV2.where(1-Ieq.mask,Ieq,tl)
-        t[ilev]=tl.astype(MV2.float32)
+            tl = MV2.where(1 - Ieq.mask, Ieq, tl)
+        t[ilev] = tl.astype(MV2.float32)
 
-    ax=A.getAxisList()
-    autobnds=cdms2.getAutoBounds()
+    ax = A.getAxisList()
+    autobnds = cdms2.getAutoBounds()
     cdms2.setAutoBounds('off')
-    lvl=cdms2.createAxis(MV2.array(levels).filled())
+    lvl = cdms2.createAxis(MV2.array(levels).filled())
     cdms2.setAutoBounds(autobnds)
     try:
-        lvl.units=I.units
+        lvl.units = I.units
     except:
         pass
-    lvl.id='plev'
-    
+    lvl.id = 'plev'
+
     try:
-      t.units=I.units
+        t.units = I.units
     except:
-      pass
-  
-    ax[0]=lvl
+        pass
+
+    ax[0] = lvl
     t.setAxisList(ax)
-    t.id=A.id
+    t.id = A.id
     for att in A.listattributes():
-        setattr(t,att,getattr(A,att))
+        setattr(t, att, getattr(A, att))
     return t(order=order)
 
-def logLinearInterpolation(A,P,levels=[100000, 92500, 85000, 70000, 60000, 50000, 40000, 30000, 25000, 20000, 15000, 10000, 7000, 5000, 3000, 2000, 1000],status=None):
+
+def logLinearInterpolation(
+    A, P, levels=[100000, 92500, 85000, 70000, 60000, 50000, 40000,
+                  30000, 25000, 20000, 15000, 10000, 7000, 5000, 3000, 2000, 1000], status=None, axis='z'):
     """
     Log-linear interpolation
     to convert a field from sigma levels to pressure levels
     Values below the surface are masked
-    
+
     Input
-    A :    array on sigma levels
-    P :    pressure field from TOP (level 0) to BOTTOM (last level)
+    A :      array on sigma levels
+    P :      pressure field from TOP (level 0) to BOTTOM (last level)
     levels : pressure levels to interpolate to (same units as P), default levels are:[100000, 92500, 85000, 70000, 60000, 50000, 40000, 30000, 25000, 20000, 15000, 10000, 7000, 5000, 3000, 2000, 1000]
+    axis:    axis over which to do the linear interpolation, default is 'z'; also accepted: an axis index such as '1' or an axis name such as '(myaxis)'
 
     P and levels must have same units
 
     Output
     array on pressure levels (levels)
-    
+
     Examples:
     A=logLinearInterpolation(A,P,levels=[100000, 92500, 85000, 70000, 60000, 50000, 40000, 30000, 25000, 20000, 15000, 10000, 7000, 5000, 3000, 2000, 1000])
     """
-    
-    cdat_info.pingPCMDIdb("cdat","cdutil.vertical.logLinearInterpolation")
+
+    cdat_info.pingPCMDIdb("cdat", "cdutil.vertical.logLinearInterpolation")
     try:
-        nlev=len(levels)  # Number of pressure levels
+        nlev = len(levels)  # Number of pressure levels
     except:
-        nlev=1  # if only one level len(levels) would breaks
-        levels=[levels,]
-    order=A.getOrder()
-    A=A(order='z...')
-    P=P(order='z...')
-    sh=list(P.shape)
-    nsigma=sh[0] #number of sigma levels
-    sh[0]=nlev
-    t=MV2.zeros(sh,typecode=MV2.float32)
-    sh2=P[0].shape
-    prev=-1
-    for ilev in range(nlev): # loop through pressure levels
+        nlev = 1  # if only one level, len(levels) would break
+        levels = [levels, ]
+    order = A.getOrder()
+    A = A(order='%s...' % axis)
+    P = P(order='%s...' % axis)
+    sh = list(P.shape)
+    nsigma = sh[0]  # number of sigma levels
+    sh[0] = nlev
+    t = MV2.zeros(sh, typecode=MV2.float32)
+    sh2 = P[0].shape
+    prev = -1
+    for ilev in range(nlev):  # loop through pressure levels
         if status is not None:
-            prev=genutil.statusbar(ilev,nlev-1.,prev)
-        lev=levels[ilev] # get value for the level
-        Pabv=MV2.ones(sh2,MV2.float)
-        Aabv=-1*Pabv # Array on sigma level Above
-        Abel=-1*Pabv # Array on sigma level Below
-        Pbel=-1*Pabv # Pressure on sigma level Below
-        Pabv=-1*Pabv # Pressure on sigma level Above
-        Peq=MV2.masked_equal(Pabv,-1) # Area where Pressure == levels
-        for i in range(1,nsigma): # loop from second sigma level to last one
-            a=MV2.greater_equal(P[i],  lev) # Where is the pressure greater than lev
-            b=   MV2.less_equal(P[i-1],lev) # Where is the pressure less than lev
+            prev = genutil.statusbar(ilev, nlev - 1., prev)
+        lev = levels[ilev]  # get value for the level
+        Pabv = MV2.ones(sh2, MV2.float)
+        Aabv = -1 * Pabv  # Array on sigma level Above
+        Abel = -1 * Pabv  # Array on sigma level Below
+        Pbel = -1 * Pabv  # Pressure on sigma level Below
+        Pabv = -1 * Pabv  # Pressure on sigma level Above
+        Peq = MV2.masked_equal(Pabv, -1)  # Area where Pressure == levels
+        for i in range(1, nsigma):  # loop from second sigma level to last one
+            # Where is the pressure greater than lev
+            a = MV2.greater_equal(P[i], lev)
+            # Where is the pressure less than lev
+            b = MV2.less_equal(P[i - 1], lev)
             # Now looks if the pressure level is in between the 2 sigma levels
             # If yes, sets Pabv, Pbel and Aabv, Abel
-            a=MV2.logical_and(a,b)
-            Pabv=MV2.where(a,P[i],Pabv) # Pressure on sigma level Above
-            Aabv=MV2.where(a,A[i],Aabv) # Array on sigma level Above
-            Pbel=MV2.where(a,P[i-1],Pbel) # Pressure on sigma level Below
-            Abel=MV2.where(a,A[i-1],Abel) # Array on sigma level Below
-            Peq= MV2.where(MV2.equal(P[i],lev),A[i],Peq)
-
-        val=MV2.masked_where(MV2.equal(Pbel,-1),numpy.ones(Pbel.shape)*lev) # set to missing value if no data below lev if there is
-        
-        tl=MV2.log(val/Pbel)/MV2.log(Pabv/Pbel)*(Aabv-Abel)+Abel # Interpolation
+            a = MV2.logical_and(a, b)
+            Pabv = MV2.where(a, P[i], Pabv)  # Pressure on sigma level Above
+            Aabv = MV2.where(a, A[i], Aabv)  # Array on sigma level Above
+            # Pressure on sigma level Below
+            Pbel = MV2.where(a, P[i - 1], Pbel)
+            Abel = MV2.where(a, A[i - 1], Abel)  # Array on sigma level Below
+            Peq = MV2.where(MV2.equal(P[i], lev), A[i], Peq)
+
+        # set to missing value if there is no data below lev
+        val = MV2.masked_where(
+            MV2.equal(Pbel, -1), numpy.ones(Pbel.shape) * lev)
+
+        tl = MV2.log(val / Pbel) / MV2.log(Pabv / Pbel) * \
+            (Aabv - Abel) + Abel  # Interpolation
         if ((Peq.mask is None) or (Peq.mask is MV2.nomask)):
-            tl=Peq
+            tl = Peq
         else:
-            tl=MV2.where(1-Peq.mask,Peq,tl)
-        t[ilev]=tl.astype(MV2.float32)
-        
-    ax=A.getAxisList()
-    autobnds=cdms2.getAutoBounds()
+            tl = MV2.where(1 - Peq.mask, Peq, tl)
+        t[ilev] = tl.astype(MV2.float32)
+
+    ax = A.getAxisList()
+    autobnds = cdms2.getAutoBounds()
     cdms2.setAutoBounds('off')
-    lvl=cdms2.createAxis(MV2.array(levels).filled())
+    lvl = cdms2.createAxis(MV2.array(levels).filled())
     cdms2.setAutoBounds(autobnds)
     try:
-        lvl.units=P.units
+        lvl.units = P.units
     except:
         pass
-    lvl.id='plev'
-    
+    lvl.id = 'plev'
+
     try:
-      t.units=P.units
+        t.units = P.units
     except:
-      pass
-  
-    ax[0]=lvl
+        pass
+
+    ax[0] = lvl
     t.setAxisList(ax)
-    t.id=A.id
+    t.id = A.id
     for att in A.listattributes():
-        setattr(t,att,getattr(A,att))
+        setattr(t, att, getattr(A, att))
     return t(order=order)
-    
-sigma2Pressure=logLinearInterpolation
+
+sigma2Pressure = logLinearInterpolation
diff --git a/CMake/cdat_modules/md5_external.cmake b/Packages/testing/__init__.py
similarity index 100%
rename from CMake/cdat_modules/md5_external.cmake
rename to Packages/testing/__init__.py
diff --git a/Packages/testing/common.py b/Packages/testing/common.py
new file mode 100644
index 0000000000000000000000000000000000000000..7a32bcec518658892de1f1247214389961097eb0
--- /dev/null
+++ b/Packages/testing/common.py
@@ -0,0 +1,22 @@
+def test_values_setting(gm,attributes,good_values=[],bad_values=[]):
+  if isinstance(attributes,str):
+    attributes=[attributes,]
+  for att in attributes:
+    for val in good_values:
+      setattr(gm,att,val)
+    for val in bad_values:
+      try:
+        setattr(gm,att,val)
+        success = True
+      except:
+        success = False
+      else:
+        if success:
+          if hasattr(gm,"g_name"):
+            nm = gm.g_name
+          elif hasattr(gm,"s_name"):
+            nm = gm.s_name
+          else:
+            nm=gm.p_name
+          raise Exception,"Should not be able to set %s attribute '%s' to %s" % (nm,att,repr(val))
+          sys.exit(1)
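+
+# A minimal usage sketch (hypothetical objects; any vcs graphics-method
+# object with settable attributes works):
+#
+#   import vcs
+#   gm = vcs.init().createboxfill()
+#   test_values_setting(gm, "projection",
+#                       good_values=["linear"], bad_values=[42])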
diff --git a/testing/checkimage.py b/Packages/testing/regression.py
similarity index 69%
rename from testing/checkimage.py
rename to Packages/testing/regression.py
index e0ad9db82a7ed0adb35b080d9da48b929ce712e7..b0b862993b154f002cf4368c1d930bc811691ab8 100644
--- a/testing/checkimage.py
+++ b/Packages/testing/regression.py
@@ -9,11 +9,41 @@ import numpy
 import vtk
 import os
 import os.path
+import re
 import sys
 import logging
+import vcs
 
 defaultThreshold=10.0
 
+def init(*args, **kwargs):
+    testingDir = os.path.join(os.path.dirname(__file__), "..")
+    sys.path.append(testingDir)
+
+    if kwargs.get('bg', True):
+        vcsinst = vcs.init(*args, **dict(kwargs, bg=1))
+        if ('geometry' not in kwargs):
+            vcsinst.setbgoutputdimensions(1200, 1091, units="pixels")
+        else:
+            xy = kwargs['geometry']
+            vcsinst.setbgoutputdimensions(xy[0], xy[1], units="pixels")
+    else:
+        vcsinst = vcs.init(*args, **dict(kwargs, bg=0))
+
+    vcsinst.setantialiasing(0)
+    vcsinst.drawlogooff()
+    return vcsinst
+
+def run(vcsinst, fname, baseline=sys.argv[1], threshold=defaultThreshold):
+    """Export plot to a png and exit after comparsion."""
+    vcsinst.png(fname)
+    sys.exit(check_result_image(fname, baseline, threshold))
+
+def run_wo_terminate(vcsinst, fname, baseline=sys.argv[1], threshold=defaultThreshold):
+    """Export plot to a png and return comparison with baseline."""
+    vcsinst.png(fname)
+    return check_result_image(fname, baseline, threshold)
+
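+# A minimal usage sketch for a test script (assuming the package is importable
+# as testing.regression; note that the baseline path defaults to sys.argv[1]):
+#
+#     import testing.regression as regression
+#     canvas = regression.init()
+#     canvas.plot(data)                     # 'data' is a hypothetical variable
+#     regression.run(canvas, "result.png")  # compare to baseline, then exit
+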
 def image_compare(testImage, baselineImage):
     imageDiff = vtk.vtkImageDifference()
     imageDiff.SetInputData(testImage)
@@ -40,18 +70,20 @@ def image_from_file(fname):
         print "Problem opening file '%s': %s"%(fname,err)
         return None
 
+# find alternate baselines for fname of the form basename_d.ext
+# where fname = basename.ext and d is a digit between 1 and 9
 def find_alternates(fname):
     dirname = os.path.dirname(fname)
     prefix, ext = os.path.splitext(os.path.split(fname)[1])
     files = os.listdir(dirname)
     results = [fname]
     for i in files:
-        if i.startswith(prefix) and i.endswith(ext) and i != prefix+ext:
+        if (re.match(prefix + '_[1-9]' + ext, i)):
             results.append(os.path.join(dirname, i))
     return results
 
-def check_result_image(fname, baselinefname, threshold = defaultThreshold,
-                       baseline = True, cleanup=True):
+def check_result_image(fname, baselinefname=sys.argv[1], threshold=defaultThreshold,
+                       baseline=True, cleanup=True, update_baselines=False, suffix="_2"):
     testImage = image_from_file(fname)
     if testImage is None:
         print "Testing image missing, test failed."
@@ -101,6 +133,13 @@ def check_result_image(fname, baselinefname, threshold = defaultThreshold,
 
     print "All baselines failed! Lowest error (%f) exceeds threshold (%f)."%(bestDiff, threshold)
 
+    if update_baselines:
+        bestFilename2 = bestFilename[:-4] + suffix + ".png"
+        print "Update baselines is ON so we are assuming you know what you're doing"
+        print "Replacing baseline %s with new baseline from %s" % (bestFilename2, fname)
+        import shutil
+        shutil.copy2(fname, bestFilename2)
+
     sp = fname.split(".")
     diffFilename = ".".join(sp[:-1])+"_diff."+sp[-1]
     print "Saving image diff at '%s'."%diffFilename
@@ -116,6 +155,7 @@ def check_result_image(fname, baselinefname, threshold = defaultThreshold,
     printDart("ValidImage", "image/png", os.path.abspath(bestFilename), "File")
     return -1
 
+
 def main():
     if len(sys.argv) != 4:
         print "Error:"
diff --git a/Packages/testing/setup.py b/Packages/testing/setup.py
new file mode 100755
index 0000000000000000000000000000000000000000..85c270ed8642eae41095cfbf680307ec57e53428
--- /dev/null
+++ b/Packages/testing/setup.py
@@ -0,0 +1,13 @@
+import os, sys
+from distutils.core import setup
+import cdat_info
+
+sys.path.append(os.environ.get('BUILD_DIR',"build"))
+
+setup(name="testing",
+      version=cdat_info.Version,
+      description="Testing infrastructure for cdat",
+      url="http://uvcdat.llnl.gov",
+      packages=['testing'],
+      package_dir = {'testing': '', }
+)
diff --git a/Packages/dat/files.txt b/Packages/vcs/Share/sample_files.txt
similarity index 100%
rename from Packages/dat/files.txt
rename to Packages/vcs/Share/sample_files.txt
diff --git a/Packages/vcs/docs/Makefile b/Packages/vcs/docs/Makefile
new file mode 100644
index 0000000000000000000000000000000000000000..377f99e0d61332f9b09569d1a58da8164953ffe4
--- /dev/null
+++ b/Packages/vcs/docs/Makefile
@@ -0,0 +1,177 @@
+# Makefile for Sphinx documentation
+#
+
+# You can set these variables from the command line.
+SPHINXOPTS    =
+SPHINXBUILD   = sphinx-build
+PAPER         =
+BUILDDIR      = _build
+
+# User-friendly check for sphinx-build
+ifeq ($(shell which $(SPHINXBUILD) >/dev/null 2>&1; echo $$?), 1)
+$(error The '$(SPHINXBUILD)' command was not found. Make sure you have Sphinx installed, then set the SPHINXBUILD environment variable to point to the full path of the '$(SPHINXBUILD)' executable. Alternatively you can add the directory with the executable to your PATH. If you don't have Sphinx installed, grab it from http://sphinx-doc.org/)
+endif
+
+# Internal variables.
+PAPEROPT_a4     = -D latex_paper_size=a4
+PAPEROPT_letter = -D latex_paper_size=letter
+ALLSPHINXOPTS   = -d $(BUILDDIR)/doctrees $(PAPEROPT_$(PAPER)) $(SPHINXOPTS) .
+# the i18n builder cannot share the environment and doctrees with the others
+I18NSPHINXOPTS  = $(PAPEROPT_$(PAPER)) $(SPHINXOPTS) .
+
+.PHONY: help clean html dirhtml singlehtml pickle json htmlhelp qthelp devhelp epub latex latexpdf text man changes linkcheck doctest gettext
+
+help:
+	@echo "Please use \`make <target>' where <target> is one of"
+	@echo "  html       to make standalone HTML files"
+	@echo "  dirhtml    to make HTML files named index.html in directories"
+	@echo "  singlehtml to make a single large HTML file"
+	@echo "  pickle     to make pickle files"
+	@echo "  json       to make JSON files"
+	@echo "  htmlhelp   to make HTML files and a HTML help project"
+	@echo "  qthelp     to make HTML files and a qthelp project"
+	@echo "  devhelp    to make HTML files and a Devhelp project"
+	@echo "  epub       to make an epub"
+	@echo "  latex      to make LaTeX files, you can set PAPER=a4 or PAPER=letter"
+	@echo "  latexpdf   to make LaTeX files and run them through pdflatex"
+	@echo "  latexpdfja to make LaTeX files and run them through platex/dvipdfmx"
+	@echo "  text       to make text files"
+	@echo "  man        to make manual pages"
+	@echo "  texinfo    to make Texinfo files"
+	@echo "  info       to make Texinfo files and run them through makeinfo"
+	@echo "  gettext    to make PO message catalogs"
+	@echo "  changes    to make an overview of all changed/added/deprecated items"
+	@echo "  xml        to make Docutils-native XML files"
+	@echo "  pseudoxml  to make pseudoxml-XML files for display purposes"
+	@echo "  linkcheck  to check all external links for integrity"
+	@echo "  doctest    to run all doctests embedded in the documentation (if enabled)"
+
+clean:
+	rm -rf $(BUILDDIR)/*
+
+html:
+	$(SPHINXBUILD) -b html $(ALLSPHINXOPTS) $(BUILDDIR)/html
+	@echo
+	@echo "Build finished. The HTML pages are in $(BUILDDIR)/html."
+
+dirhtml:
+	$(SPHINXBUILD) -b dirhtml $(ALLSPHINXOPTS) $(BUILDDIR)/dirhtml
+	@echo
+	@echo "Build finished. The HTML pages are in $(BUILDDIR)/dirhtml."
+
+singlehtml:
+	$(SPHINXBUILD) -b singlehtml $(ALLSPHINXOPTS) $(BUILDDIR)/singlehtml
+	@echo
+	@echo "Build finished. The HTML page is in $(BUILDDIR)/singlehtml."
+
+pickle:
+	$(SPHINXBUILD) -b pickle $(ALLSPHINXOPTS) $(BUILDDIR)/pickle
+	@echo
+	@echo "Build finished; now you can process the pickle files."
+
+json:
+	$(SPHINXBUILD) -b json $(ALLSPHINXOPTS) $(BUILDDIR)/json
+	@echo
+	@echo "Build finished; now you can process the JSON files."
+
+htmlhelp:
+	$(SPHINXBUILD) -b htmlhelp $(ALLSPHINXOPTS) $(BUILDDIR)/htmlhelp
+	@echo
+	@echo "Build finished; now you can run HTML Help Workshop with the" \
+	      ".hhp project file in $(BUILDDIR)/htmlhelp."
+
+qthelp:
+	$(SPHINXBUILD) -b qthelp $(ALLSPHINXOPTS) $(BUILDDIR)/qthelp
+	@echo
+	@echo "Build finished; now you can run "qcollectiongenerator" with the" \
+	      ".qhcp project file in $(BUILDDIR)/qthelp, like this:"
+	@echo "# qcollectiongenerator $(BUILDDIR)/qthelp/Girder.qhcp"
+	@echo "To view the help file:"
+	@echo "# assistant -collectionFile $(BUILDDIR)/qthelp/Girder.qhc"
+
+devhelp:
+	$(SPHINXBUILD) -b devhelp $(ALLSPHINXOPTS) $(BUILDDIR)/devhelp
+	@echo
+	@echo "Build finished."
+	@echo "To view the help file:"
+	@echo "# mkdir -p $$HOME/.local/share/devhelp/Girder"
+	@echo "# ln -s $(BUILDDIR)/devhelp $$HOME/.local/share/devhelp/Girder"
+	@echo "# devhelp"
+
+epub:
+	$(SPHINXBUILD) -b epub $(ALLSPHINXOPTS) $(BUILDDIR)/epub
+	@echo
+	@echo "Build finished. The epub file is in $(BUILDDIR)/epub."
+
+latex:
+	$(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex
+	@echo
+	@echo "Build finished; the LaTeX files are in $(BUILDDIR)/latex."
+	@echo "Run \`make' in that directory to run these through (pdf)latex" \
+	      "(use \`make latexpdf' here to do that automatically)."
+
+latexpdf:
+	$(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex
+	@echo "Running LaTeX files through pdflatex..."
+	$(MAKE) -C $(BUILDDIR)/latex all-pdf
+	@echo "pdflatex finished; the PDF files are in $(BUILDDIR)/latex."
+
+latexpdfja:
+	$(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex
+	@echo "Running LaTeX files through platex and dvipdfmx..."
+	$(MAKE) -C $(BUILDDIR)/latex all-pdf-ja
+	@echo "pdflatex finished; the PDF files are in $(BUILDDIR)/latex."
+
+text:
+	$(SPHINXBUILD) -b text $(ALLSPHINXOPTS) $(BUILDDIR)/text
+	@echo
+	@echo "Build finished. The text files are in $(BUILDDIR)/text."
+
+man:
+	$(SPHINXBUILD) -b man $(ALLSPHINXOPTS) $(BUILDDIR)/man
+	@echo
+	@echo "Build finished. The manual pages are in $(BUILDDIR)/man."
+
+texinfo:
+	$(SPHINXBUILD) -b texinfo $(ALLSPHINXOPTS) $(BUILDDIR)/texinfo
+	@echo
+	@echo "Build finished. The Texinfo files are in $(BUILDDIR)/texinfo."
+	@echo "Run \`make' in that directory to run these through makeinfo" \
+	      "(use \`make info' here to do that automatically)."
+
+info:
+	$(SPHINXBUILD) -b texinfo $(ALLSPHINXOPTS) $(BUILDDIR)/texinfo
+	@echo "Running Texinfo files through makeinfo..."
+	make -C $(BUILDDIR)/texinfo info
+	@echo "makeinfo finished; the Info files are in $(BUILDDIR)/texinfo."
+
+gettext:
+	$(SPHINXBUILD) -b gettext $(I18NSPHINXOPTS) $(BUILDDIR)/locale
+	@echo
+	@echo "Build finished. The message catalogs are in $(BUILDDIR)/locale."
+
+changes:
+	$(SPHINXBUILD) -b changes $(ALLSPHINXOPTS) $(BUILDDIR)/changes
+	@echo
+	@echo "The overview file is in $(BUILDDIR)/changes."
+
+linkcheck:
+	$(SPHINXBUILD) -b linkcheck $(ALLSPHINXOPTS) $(BUILDDIR)/linkcheck
+	@echo
+	@echo "Link check complete; look for any errors in the above output " \
+	      "or in $(BUILDDIR)/linkcheck/output.txt."
+
+doctest:
+	$(SPHINXBUILD) -b doctest $(ALLSPHINXOPTS) $(BUILDDIR)/doctest
+	@echo "Testing of doctests in the sources finished, look at the " \
+	      "results in $(BUILDDIR)/doctest/output.txt."
+
+xml:
+	$(SPHINXBUILD) -b xml $(ALLSPHINXOPTS) $(BUILDDIR)/xml
+	@echo
+	@echo "Build finished. The XML files are in $(BUILDDIR)/xml."
+
+pseudoxml:
+	$(SPHINXBUILD) -b pseudoxml $(ALLSPHINXOPTS) $(BUILDDIR)/pseudoxml
+	@echo
+	@echo "Build finished. The pseudo-XML files are in $(BUILDDIR)/pseudoxml."
diff --git a/Packages/vcs/docs/conf.py b/Packages/vcs/docs/conf.py
new file mode 100755
index 0000000000000000000000000000000000000000..cc3fcd57b4d94d82200a89ae66d24678fdad3190
--- /dev/null
+++ b/Packages/vcs/docs/conf.py
@@ -0,0 +1,275 @@
+#!/usr/bin/env python3
+# -*- coding: utf-8 -*-
+#
+# VCS documentation build configuration file, originally created by
+# sphinx-quickstart on Thu Apr 11 11:42:23 2013.
+#
+# This file is execfile()d with the current directory set to its containing dir.
+#
+# Note that not all possible configuration values are present in this
+# autogenerated file.
+#
+# All configuration values have a default; values that are commented out
+# serve to show the default.
+
+import sys, os
+#import sphinx_bootstrap_theme
+
+# on_rtd is whether we are on readthedocs.org
+on_rtd = os.environ.get('READTHEDOCS', None) == 'True'
+
+if not on_rtd:  # only import and set the theme if we're building docs locally
+    import sphinx_rtd_theme
+    html_theme = 'sphinx_rtd_theme'
+    html_theme_path = [sphinx_rtd_theme.get_html_theme_path()]
+
+# otherwise, readthedocs.org uses their theme by default, so no need to specify it
+
+# If extensions (or modules to document with autodoc) are in another directory,
+# add these directories to sys.path here. If the directory is relative to the
+# documentation root, use os.path.abspath to make it absolute, like shown here.
+sys.path.insert(0, os.path.abspath('..'))
+
+
+# -- General configuration -----------------------------------------------------
+
+# If your documentation needs a minimal Sphinx version, state it here.
+#needs_sphinx = '1.0'
+
+# Add any Sphinx extension module names here, as strings. They can be extensions
+# coming with Sphinx (named 'sphinx.ext.*') or your custom ones.
+extensions = ['sphinx.ext.autodoc', 'sphinx.ext.todo', 'sphinx.ext.coverage', 'sphinx.ext.mathjax', 'sphinx.ext.ifconfig', 'sphinx.ext.viewcode', 'sphinx.ext.extlinks', 'sphinx.ext.doctest']
+
+# turn off doctests of autodoc included files (these are tested elsewhere)
+doctest_test_doctest_blocks = None
+
+# Add any paths that contain templates here, relative to this directory.
+templates_path = ['templates']
+
+# The suffix of source filenames.
+source_suffix = '.rst'
+
+# The encoding of source files.
+#source_encoding = 'utf-8-sig'
+
+# The master toctree document.
+master_doc = 'index'
+
+# General information about the project.
+project = 'VCS'
+copyright = '2016, LLNL'
+
+# The version info for the project you're documenting, acts as replacement for
+# |version| and |release|, also used in various other places throughout the
+# built documents.
+#
+# These are set to placeholder values here, but this is overridden in
+# CMakeLists.txt via -D flags to set them explicitly using a variable defined there.
+#
+# The short X.Y version.
+version = '0.1'
+
+# The full version, including alpha/beta/rc tags.
+release = '0.1.0'
+
+# The language for content autogenerated by Sphinx. Refer to documentation
+# for a list of supported languages.
+#language = None
+
+# There are two options for replacing |today|: either, you set today to some
+# non-false value, then it is used:
+#today = ''
+# Else, today_fmt is used as the format for a strftime call.
+#today_fmt = '%B %d, %Y'
+
+# List of patterns, relative to source directory, that match files and
+# directories to ignore when looking for source files.
+exclude_patterns = []
+
+# The reST default role (used for this markup: `text`) to use for all documents.
+#default_role = None
+
+# If true, '()' will be appended to :func: etc. cross-reference text.
+#add_function_parentheses = True
+
+# If true, the current module name will be prepended to all description
+# unit titles (such as .. function::).
+#add_module_names = True
+
+# If true, sectionauthor and moduleauthor directives will be shown in the
+# output. They are ignored by default.
+#show_authors = False
+
+# The name of the Pygments (syntax highlighting) style to use.
+pygments_style = 'sphinx'
+
+# A list of ignored prefixes for module index sorting.
+#modindex_common_prefix = []
+
+# Define an external link to refer to the base Tangelo installation - this is
+# the actual installation if the docs are built locally, or the default location
+# of localhost, port 80, for the documentation built on readthedocs.
+import os
+on_rtd = os.environ.get("READTHEDOCS", None) is not None
+extlinks = {"root": ("http://localhost:8080%s" if on_rtd else "%s", None)}
+
+# -- Options for HTML output ---------------------------------------------------
+
+# The theme to use for HTML and HTML Help pages.  See the documentation for
+# a list of builtin themes.
+#html_theme = 'pyramid'
+#html_theme = 'bootstrap'
+#html_theme_path = sphinx_bootstrap_theme.get_html_theme_path()
+
+# Theme options are theme-specific and customize the look and feel of a theme
+# further.  For a list of options available for each theme, see the
+# documentation.
+#html_theme_options = {
+#   'bootswatch_theme': "readable"
+#}
+
+# Add any paths that contain custom themes here, relative to this directory.
+#html_theme_path = []
+
+# The name for this set of Sphinx documents.  If None, it defaults to
+# "<project> v<release> documentation".
+#html_title = None
+
+# A shorter title for the navigation bar.  Default is the same as html_title.
+#html_short_title = None
+
+# The name of an image file (relative to this directory) to place at the top
+# of the sidebar.
+#html_logo = None
+
+# The name of an image file (within the static path) to use as favicon of the
+# docs.  This file should be a Windows icon file (.ico) being 16x16 or 32x32
+# pixels large.
+#html_favicon = "tangelo.ico"
+
+# Add any paths that contain custom static files (such as style sheets) here,
+# relative to this directory. They are copied after the builtin static files,
+# so a file named "default.css" will overwrite the builtin "default.css".
+html_static_path = ['static']
+
+# If not '', a 'Last updated on:' timestamp is inserted at every page bottom,
+# using the given strftime format.
+#html_last_updated_fmt = '%b %d, %Y'
+
+# If true, SmartyPants will be used to convert quotes and dashes to
+# typographically correct entities.
+#html_use_smartypants = True
+
+# Custom sidebar templates, maps document names to template names.
+#html_sidebars = {}
+
+# Additional templates that should be rendered to pages, maps page names to
+# template names.
+#html_additional_pages = {}
+
+# If false, no module index is generated.
+html_domain_indices = False
+
+# If false, no index is generated.
+#html_use_index = True
+
+# If true, the index is split into individual pages for each letter.
+#html_split_index = False
+
+# If true, links to the reST sources are added to the pages.
+#html_show_sourcelink = True
+
+# If true, "Created using Sphinx" is shown in the HTML footer. Default is True.
+#html_show_sphinx = True
+
+# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True.
+#html_show_copyright = True
+
+# If true, an OpenSearch description file will be output, and all pages will
+# contain a <link> tag referring to it.  The value of this option must be the
+# base URL from which the finished HTML is served.
+#html_use_opensearch = ''
+
+# Show "todo" notes.
+todo_include_todos = False
+
+# This is the file name suffix for HTML files (e.g. ".xhtml").
+#html_file_suffix = None
+
+# Output file base name for HTML help builder.
+htmlhelp_basename = 'VCSDoc'
+
+
+# -- Options for LaTeX output --------------------------------------------------
+
+latex_elements = {
+# The paper size ('letterpaper' or 'a4paper').
+#'papersize': 'letterpaper',
+
+# The font size ('10pt', '11pt' or '12pt').
+#'pointsize': '10pt',
+
+# Additional stuff for the LaTeX preamble.
+#'preamble': '',
+}
+
+# Grouping the document tree into LaTeX files. List of tuples
+# (source start file, target name, title, author, documentclass [howto/manual]).
+latex_documents = [
+  ('index', 'vcs.tex', 'VCS Documentation',
+   'LLNL', 'manual'),
+]
+
+# The name of an image file (relative to this directory) to place at the top of
+# the title page.
+#latex_logo = None
+
+# For "manual" documents, if this is true, then toplevel headings are parts,
+# not chapters.
+#latex_use_parts = False
+
+# If true, show page references after internal links.
+#latex_show_pagerefs = False
+
+# If true, show URL addresses after external links.
+#latex_show_urls = False
+
+# Documents to append as an appendix to all manuals.
+#latex_appendices = []
+
+# If false, no module index is generated.
+#latex_domain_indices = True
+
+
+# -- Options for manual page output --------------------------------------------
+
+# One entry per manual page. List of tuples
+# (source start file, name, description, authors, manual section).
+man_pages = [
+    ('index', 'vcs', 'VCS Documentation',
+     ['LLNL'], 1)
+]
+
+# If true, show URL addresses after external links.
+#man_show_urls = False
+
+
+# -- Options for Texinfo output ------------------------------------------------
+
+# Grouping the document tree into Texinfo files. List of tuples
+# (source start file, target name, title, author,
+#  dir menu entry, description, category)
+texinfo_documents = [
+  ('index', 'vcs', 'vcs Documentation',
+   'LLNL', 'vcs', 'Visualization library',
+   'Miscellaneous'),
+]
+
+# Documents to append as an appendix to all manuals.
+#texinfo_appendices = []
+
+# If false, no module index is generated.
+#texinfo_domain_indices = True
+
+# How to display URL addresses: 'footnote', 'no', or 'inline'.
+#texinfo_show_urls = 'footnote'
diff --git a/Packages/vcs/docs/developer-guide.rst b/Packages/vcs/docs/developer-guide.rst
new file mode 100644
index 0000000000000000000000000000000000000000..cf18c40acd45b2806154b0b68f9677eab9de3f20
--- /dev/null
+++ b/Packages/vcs/docs/developer-guide.rst
@@ -0,0 +1,2 @@
+Developer Guide
+===============
diff --git a/Packages/vcs/docs/index.rst b/Packages/vcs/docs/index.rst
new file mode 100644
index 0000000000000000000000000000000000000000..1fcb27b4eeba50966ee76ab42f6f895b3dfe03ba
--- /dev/null
+++ b/Packages/vcs/docs/index.rst
@@ -0,0 +1,59 @@
+VCS: Visualization Control System
+==================================
+
+What is VCS?
+---------------
+
+The PCMDI Visualization Control System (VCS) is expressly designed to meet the needs of the scientific community. VCS allows wide-ranging changes to be made to the data display, provides for hardcopy output, and includes a means for recovery of a previous display.
+
+In the VCS model, the data display is defined by a trio of named object sets, designated the “primary objects” (or “primary elements”). These include:
+
+* **Data Ingestion**: The data, which drives the visualization, is ingested into the system via cdms2 or numeric modules such as numpy.
+
+* **Graphics Method**: The graphics method, which specifies the display technique.
+
+* **Template**: The picture template, which determines the appearance of each segment of the display. Tables for manipulating these primary objects are stored in VCS for later recall and possible use.
+
+In addition, detailed specification of the primary objects’ attributes is provided by eight “secondary objects” (or “secondary elements”):
+
+* **colormap**: Specification of combinations of 256 available colors
+* **fill area**: Style, style index, and color index
+* **format**: Specifications for converting numbers to display strings
+* **line**: Line type, width and color index
+* **list**: A sequence of pairs of numerical and character values
+* **marker**: Marker type, size, and color index
+* **text**: Text font type, character spacing, expansion and color index
+* **text orientation**: Character height, angle, path, and horizontal/vertical alignment
+
+By combining primary and secondary objects in various ways (either at the command line or in a program), the VCS user can comprehensively diagnose and intercompare climate model simulations. VCS provides capabilities to:
+
+- View, select and modify attributes of data variables and of their dimensions
+- Create and modify existing template attributes and graphics methods
+- Save the state-of-the-system as a script to be run interactively or in a program
+- Save a display as a Computer Graphics Metafile (CGM), GIF, Postscript, Sun Raster, or Encapsulated Postscript file
+- Perform grid transformations and compute new data variables
+- Create and modify color maps
+- Zoom into a specified portion of a display
+- Change the orientation (portrait vs. landscape) or size (partial vs. full-screen) of a display
+- Animate a single data variable or more than one data variable simultaneously
+- Display data in various geospatial projections
+
+For an overview of the concepts present in VCS, we recommend checking out the :doc:`user-guide`.
+
+VCS is published under the Apache 2.0 License. Its source code can be found at
+https://github.com/UV-CDAT/uvcdat/Packages/vcs
+
+Table of contents
+-----------------
+.. toctree::
+   :maxdepth: 2
+
+   user-guide
+   developer-guide
+   reference
+
+API index
+---------
+
+* :ref:`genindex`
+* :ref:`modindex`
\ No newline at end of file
diff --git a/Packages/vcs/docs/reference.rst b/Packages/vcs/docs/reference.rst
new file mode 100644
index 0000000000000000000000000000000000000000..73db692bf6ad6a231d0c94225e74fe57b42ca13c
--- /dev/null
+++ b/Packages/vcs/docs/reference.rst
@@ -0,0 +1,129 @@
+VCS Reference Guide
+--------------------
+
+init
+^^^^
+* Initialize, Construct a VCS Canvas Object
+
+.. code-block:: python
+
+    import vcs,cdms2
+
+    file = cdms2.open('clt.nc')
+
+    slab = file.getslab('clt')
+
+    a = vcs.init()
+
+    # This example constructs 4 VCS Canvases (a, b, c and d)
+
+    # Plot slab using default settings
+    a.plot(slab)
+
+    # Construct a new VCS object
+    b = vcs.init()
+
+    # Get 'AMIP' template object
+    template = b.gettemplate('AMIP')
+
+    # Plot slab using template 'AMIP'
+    b.plot(slab, template)
+
+    # Construct a new VCS object
+    c = vcs.init()
+
+    # Get 'quick' isofill graphics method
+    isofill = c.getisofill('quick')
+
+    # Plot slab using template and isofill objects
+    c.plot(slab, template, isofill)
+
+    # Construct a new VCS object
+    d = vcs.init()
+
+    # Get 'quick' isoline graphics method
+    isoline = d.getisoline('quick')
+
+    # Plot slab using isoline and template objects
+    d.plot(isoline, slab, template)
+
+help
+^^^^
+* Print out the object's doc string
+
+.. code-block:: python
+
+    import vcs
+    a = vcs.init()
+    ln = a.getline('red')
+
+    # Get a VCS line object
+    # This will print out information on how to use ln
+    a.objecthelp(ln)
+
+open
+^^^^
+* Open VCS Canvas object.
+* This routine really just manages the VCS Canvas. It pops up the VCS Canvas for viewing and can be used to display it on screen.
+
+.. code-block:: python
+
+    import vcs
+    a = vcs.init()
+    a.open()
+
+close
+^^^^^
+* Close the VCS Canvas. It will remove the VCS Canvas object from the screen, but not deallocate it.
+
+.. code-block:: python
+
+    import vcs
+    a = vcs.init()
+    a.plot(array, 'default', 'isofill', 'quick')
+    a.close()
+
+mode
+^^^^
+* ``Options <0 = manual, 1 = automatic>``
+* Update the VCS Canvas.
+* Updating of the graphical displays on the VCS Canvas can be deferred until a later time. This is helpful when generating templates or displaying numerous plots. If a series of commands are given to VCS and the Canvas Mode is set to manual (i.e., 0), then no updating of the VCS Canvas occurs until the 'update' function is executed.
+
+.. note:: By default the VCS Canvas Mode is set to ``1``, which means VCS will update the VCS Canvas as necessary without prompting from the user.
+
+.. code-block:: python
+
+    import vcs
+    a = vcs.init()
+    # Set updating to manual mode
+    a.mode = 0
+    a.plot(array, 'default', 'boxfill', 'quick')
+    box = a.getboxfill('quick')
+    box.color_1 = 100
+    box.xticlabels('lon30', 'lon30')
+    box.xticlabels('','')
+    box.datawc(1e20, 1e20, 1e20, 1e20)
+    box.datawc(-45.0, 45.0, -90.0, 90.0)
+
+    # Update the changes manually
+    a.update()
+
+update
+^^^^^^
+* Update the VCS Canvas manually when the ``mode`` is set to ``0`` (manual).
+
+.. code-block:: python
+
+    import vcs
+
+    a = vcs.init()
+    # Go to manual mode
+    a.mode = 0
+
+    a.plot(s, 'default', 'boxfill', 'quick')
+    box = a.getboxfill('quick')
+    box.color_1 = 100
+    box.xticlabels('lon30', 'lon30')
+    box.xticlabels('','')
+    box.datawc(1e20, 1e20, 1e20, 1e20)
+    box.datawc(-45.0, 45.0, -90.0, 90.0)
+
+    # Update the changes manually
+    a.update()
\ No newline at end of file
diff --git a/Packages/vcs/docs/static/clt.png b/Packages/vcs/docs/static/clt.png
new file mode 100644
index 0000000000000000000000000000000000000000..3d721ffdb6ef9be9c54cbff606c06497f33adf8d
Binary files /dev/null and b/Packages/vcs/docs/static/clt.png differ
diff --git a/Packages/vcs/docs/user-guide.rst b/Packages/vcs/docs/user-guide.rst
new file mode 100644
index 0000000000000000000000000000000000000000..6e03c0ee330f5549cc3ce8ddbf93437e23621882
--- /dev/null
+++ b/Packages/vcs/docs/user-guide.rst
@@ -0,0 +1,443 @@
+User Guide
+==========
+
+Document Conventions
+--------------------
+
+This User Guide is written for end-users of vcs, rather than developers. If you
+have suggestions or questions about this documentation, feel free to contact
+the `UV-CDAT <https://github.com/UV-CDAT/uvcdat>`_ team on the `mailing list <uvcdat-users@lists.llnl.gov>`_.
+
+vcs specific entities will be ``formatted like this``.
+
+.. _concepts:
+
+Installation
+------------
+While there are many ways a user can install vcs, installation using conda is
+preferred for the end user. To install just vcs or uvcdat, make sure that anaconda
+or miniconda is installed and on your shell's PATH. Information on how to install conda
+can be found `here <https://www.continuum.io>`_. Verify conda is available on the shell using
+the following command ::
+
+    conda --help
+
+To enable conda installation in an environment with strict SSL certificate checking, try ::
+
+    conda config --set ssl_verify False
+    binstar config --set verify_ssl False
+
+Install uvcdat, which will install vcs as well, using the following command ::
+
+    conda install uvcdat -c uvcdat
+
+To install only vcs, use the following command ::
+
+    conda install vcs -c uvcdat
+
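+To verify that vcs is importable after installation (a quick sanity check,
+not part of the official installation steps), run ::
+
+    python -c "import vcs"
+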
+Concepts
+--------
+
+The VCS module can accept data from the cdms2 module or numpy arrays. For details on
+how to use either of these modules, see their respective documentation. For examples of the
+direct use of these modules, see the VCS API Examples chapter and the examples located throughout this text.
+
+VCS Model
+^^^^^^^^^
+
+The VCS model is defined by a trio of named attribute sets, designated the “Primary Objects” (also known as “Primary Elements”).
+These include: the data, which specifies what is to be displayed and is obtained from a cdms2 variable or numpy array;
+the graphics method, which specifies the display technique; and the picture template, which determines the appearance of
+each segment of the display.
+
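+Read together, the trio maps directly onto a single plot call. Below is a
+minimal sketch of that mapping (it assumes the sample file ``clt.nc`` used by
+the examples later in this guide):
+
+.. code-block:: python
+
+    import vcs
+    import cdms2
+
+    clt = cdms2.open('clt.nc')['clt']    # the data: what to display
+    canvas = vcs.init()
+    template = canvas.createtemplate()   # the template: layout of the display
+    isofill = canvas.createisofill()     # the graphics method: display technique
+    canvas.plot(clt, template, isofill)
+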
+VCS Primary Objects (or Primary Elements)
+^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+A description of each primary object is warranted before showing their use and usefulness in VCS. See descriptions below.
+
+**Graphics Method Objects**
+
+A graphics method simply defines how data is to be displayed on the screen. Currently, there are thirteen different graphics methods with more on the way. Each graphics method has its own unique set of attributes (or members) and functions. They also have a set of core attributes that are common to all graphics methods. The descriptions of the current set of graphics methods are as follows (a short inspection example follows the list):
+
+* ``boxfill`` - The boxfill graphics method draws color grid cells to represent the data on the VCS - Canvas. Its class symbol or alias is “Gfb”.
+* ``continents`` - The continents graphics method draws a predefined, generic set of continental outlines in a longitude by latitude space. To draw continental outlines, no external data set is required. Its class symbol or alias is “Gcon”.
+* ``isofill`` - The isofill graphics method fills the area between selected isolevels (levels of constant value) of a two-dimensional array with a user-specified color. Its class symbol or alias is “Gfi”.
+* ``isoline`` - The isoline graphics method draws lines of constant value at specified levels in order to graphically represent a two-dimensional array. It also labels the values of these isolines on the VCS Canvas. Its class symbol or alias is “Gi”.
+* ``outfill`` - The outfill graphics method fills a set of integer values in any data array. Its primary purpose is to display continents by filling their area as defined by a surface type array that indicates land, ocean, and sea-ice points. Its class symbol or alias is “Gfo”.
+* ``outline`` - The Outline graphics method outlines a set of integer values in any data array. Its primary purpose is to display continental outlines as defined by a surface type array that indicates land, ocean, and sea-ice points. Its class symbol or alias is “Go”.
+* ``scatter`` - The scatter graphics method displays a scatter plot of two 4-dimensional data arrays, e.g. A(x,y,z,t) and B(x,y,z,t). Its class symbol or alias is “GSp”.
+* ``vector`` - The Vector graphics method displays a vector plot of a 2D vector field. Vectors are located at the coordinate locations and point in the direction of the data vector field. Vector magnitudes are the product of data vector field lengths and a scaling factor. Its class symbol or alias is “Gv”.
+* ``xvsy`` - The XvsY graphics method displays a line plot from two 1D data arrays, that is X(t) and Y(t), where ‘t’ represents the 1D coordinate values. Its class symbol or alias is “GXY”.
+* ``xyvsy`` - The Xyvsy graphics method displays a line plot from a 1D data array, i.e. a plot of X(y) where ‘y’ represents the 1D coordinate values. Its class symbol or alias is “GXy”.
+* ``Yxvsx`` - The Yxvsx graphics method displays a line plot from a 1D data array, i.e. a plot of Y(x) where ‘x’ represents the 1D coordinate values. Its class symbol or alias is “GYx”.
+* ``3dscalar`` - The 3dscalar graphics method displays an interactive 3D plot of a 4-dimensional (x,y,z,t) data array. Its class symbol or alias is “3d_scalar”.
+* ``3dvector`` - The 3dvector graphics method displays an interactive 3D plot of a 4-dimensional (x,y,z,t) vector field. Its class symbol or alias is “3d_vector”.
+
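+A quick way to inspect any graphics method is its ``list()`` method, which
+prints the attribute settings. A minimal sketch:
+
+.. code-block:: python
+
+    import vcs
+
+    canvas = vcs.init()
+    box = canvas.getboxfill('default')   # boxfill, class alias "Gfb"
+    box.list()                           # print its attribute settings
+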
+
+**Picture Template Object**
+
+A picture template determines the location of each picture segment, the space to be allocated to it, and related properties relevant to its display. The description of the picture template is as follows:
+
+* ``template`` - Picture Template attributes describe where and how segments of a picture will be displayed. The segments are graphical representations of: textual identification of the data; formatted values of single-valued dimensions and mean, maximum, and minimum data values; axes, tick marks, labels, boxes, lines, and a legend that is graphics-method specific; and the data itself. Picture templates describe where to display all segments including the data. Its class symbol or alias is “P”.
+
+**Data Object**
+
+Array data attribute sets and their associated dimensions are to be modified outside of VCS. See the CDMS2 module documentation for data extraction, creation and manipulation.
+
+VCS Secondary Objects (or Secondary Elements)
+^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+A description of each secondary object is warranted before showing their use and usefulness in VCS. It is these secondary objects that define the detailed specification of the primary objects’ attributes. Currently, there are five secondary objects with more to follow.
+
+
+**Colormap Object**
+
+The colormap object is used to specify, create, and modify colormaps. There are 256 colors and color indices, but only the first 240 color indices can be modified (indices 240 through 255 are reserved for VCS internal use). The description of the colormap object is as follows:
+
+* ``colormap`` - A colormap contains 240 user-definable colors that are used for graphical displays. The color mixtures are defined in terms of percentages of red, green, and blue colors (0 to 100% for each). The resulting color depends on the specified mixtures of red, green, and blue. Its class symbol or alias is “Cp”.
+
+.. note:: VCS colormaps are objects, but they are not referenced like other secondary objects.
+
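+A minimal sketch of the red/green/blue percentage convention described above
+(the ``setcolormap``/``setcolorcell`` calls are the expected interface; consult
+the API reference for the authoritative signatures):
+
+.. code-block:: python
+
+    import vcs
+
+    canvas = vcs.init()
+    cmap = canvas.createcolormap('mymap')   # start from a copy of the default
+    canvas.setcolormap('mymap')             # make it the active colormap
+    canvas.setcolorcell(16, 100, 0, 0)      # cell 16 becomes pure red (0-100%)
+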
+
+**Fillarea Object**
+
+The fillarea object allows the user to edit fillarea attributes, including fillarea interior style, style index, and color index. The description of the fillarea object is as follows:
+
+* ``fillarea`` - The fill area attributes are used to display regions defined by closed polygons, which can be filled with a uniform color, a pattern, or a hatch style. Attributes specify the style, color, position, and dimensions of the fill area. Its class symbol or alias is “Tf”.
+
+
+**Line Object**
+
+The line object allows the editing of line type, width, and color index. The description of the line object is as follows:
+
+* ``line`` - The line attributes specify the type, width, and color of the line to be drawn for a graphical display. Its class symbol or alias is “Tl”.
+
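+A minimal sketch of a line object (drawing it with ``canvas.line()`` is the
+expected usage; exact draw calls may vary between versions):
+
+.. code-block:: python
+
+    import vcs
+
+    canvas = vcs.init()
+    ln = canvas.createline()
+    ln.type = 'dash'       # line type
+    ln.width = 2           # line width
+    ln.color = 242         # color index
+    ln.x = [0.1, 0.9]      # normalized canvas coordinates
+    ln.y = [0.5, 0.5]
+    canvas.line(ln)        # draw the line object
+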
+
+**Marker Object**
+
+The marker object allows the editing of the marker type, size, and color index. The description of the marker object is as follows:
+
+* ``marker`` - The marker attribute specifies graphical symbols, symbol sizes, and colors used in appropriate graphics methods. Its class symbol or alias is “Tm”.
+
+
+**Text Objects**
+
+Graphical displays often contain textual inscriptions, which provide further information. The text-table object attributes allow the generation of character strings on the VCS Canvas by defining the character font, precision, expansion, spacing, and color. The text-orientation object attributes allow the appearance of text character strings to be changed by defining the character height, up-angle, path, and horizontal and vertical alignment. The text-combined object is a combination of both text-table and text-orientation objects. The description of the text objects are as follows:
+
+* ``textcombined`` - The text-combined attributes combine the text-table attributes and a text-orientation attributes together. From combining the two classes, the user is able to set attributes for both classes at once (i.e., define the font, spacing, expansion, color index, height, angle, path, vertical alignment, and horizontal alignment). Its class symbol or alias is “Tc”.
+
+* ``textorientation`` - The text-orientation attributes set names that define the height, angle, path, horizontal alignment and vertical alignment. Its class symbol or alias is “To”.
+
+* ``texttable`` - The text-table attributes set names that define the font, spacing, expansion, and color index. Its class symbol or alias is “Tt”.
+
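+A minimal sketch of a text-combined object in use (attribute names follow the
+descriptions above; ``canvas.plot(txt)`` is one accepted way to draw it):
+
+.. code-block:: python
+
+    import vcs
+
+    canvas = vcs.init()
+    txt = canvas.createtextcombined()
+    txt.string = ['Sample annotation']   # the text to draw
+    txt.x = [0.5]                        # normalized canvas coordinates
+    txt.y = [0.5]
+    txt.height = 20                      # character height
+    canvas.plot(txt)
+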
+
+Getting Started with VCS
+------------------------
+
+Import VCS
+^^^^^^^^^^
+
+In Python, before one can start using a module they must first load it.
+To load the VCS module, like all other Python modules, either type:
+
+``from vcs import *``
+
+or
+
+``import vcs``
+
+If you use ``import vcs``, then you must prepend "vcs" to certain calls
+(e.g., ``vcs.help()``). If you use ``from vcs import *``, then you must
+be aware of possible name clashes. That is, if two packages are imported
+using the form ``from name import *`` and both have a "help" function,
+then Python doesn't know which ``help`` function to call. For such
+cases, and indeed as an unspoken rule, it is best to use "import name"
+to avoid name clashing between packages.
+
+Create Canvas Object
+^^^^^^^^^^^^^^^^^^^^
+
+To construct a VCS Canvas object type the following:
+
+``a = vcs.init()``
+
+There can only be at most 8 VCS Canvas objects initialized at any given
+time.
+
+Plotting in VCS
+^^^^^^^^^^^^^^^
+There are several different ways to display data on the VCS Canvas. The
+most basic way is to use the plot() function. The simple plot() function
+signature is: plot(array1, [array2], [template object], [graphics\_method
+object]). The examples below show how to plot a simple array
+using default values for everything else.
+
+.. code-block:: python
+
+    # Import vcs and io (cdms) modules
+    import vcs
+    import cdms2
+
+    # Open sample NetCDF data file. Depending on the location of clt.nc
+    # a relative or absolute path might be needed.
+    data = cdms2.open('clt.nc')
+
+    # Initialize vcs with specific width and height
+    # and then plot the variable
+    canvas = vcs.init(geometry=(400, 400))
+    clt = data['clt']
+    canvas.plot(clt)
+
+    # Close the canvas context
+    canvas.close()
+
+The script should produce a plot as shown below:
+
+.. image:: static/clt.png
+   :width: 400px
+   :height: 400px
+   :align: center
+
+As mentioned earlier, vcs can use numpy arrays directly. The example below shows how to plot numpy array data.
+
+.. code-block:: python
+
+    # Import necessary modules
+    import vcs
+    import cdms2
+    import numpy
+
+    # Manually create data
+    data = numpy.sin(numpy.arange(100))
+
+    # Reshape to make it useful for vcs
+    data = numpy.reshape(data, (10, 10))
+
+    # Initialize vcs and then plot the data
+    canvas = vcs.init()
+    canvas.plot(data)
+
+    # Close the canvas context
+    canvas.close()
+
+It should be noted that plot can take multiple arguments. For example, plot can take the ``bg=1`` option to draw the visualization in the background. Below is the climate data plotting example with a few new options passed to the plot method.
+
+.. code-block:: python
+
+    # Import vcs and io (cdms) modules
+    import vcs
+    import cdms2
+
+    # Open sample NetCDF data file
+    data = cdms2.open('clt.nc')
+
+    # Initialize vcs and then plot the variable
+    canvas = vcs.init()
+
+    # Create isofill graphics method
+    iso = canvas.createisofill()
+
+    # Create default template
+    template = canvas.createtemplate()
+
+    # Scale down visualization space
+    template.scale(0.8)
+
+    clt = data['clt']
+
+    # Plot isofill with continents outline and custom template
+    canvas.plot(template, iso, clt, continents=1)
+
+    # Close the canvas context
+    canvas.close()
+
+
+.. note:: When using the plot() function, keep in mind that all keyword arguments must be last. The order of the arguments is not restrictive, just as long as they are before any keyword argument.
+
+
+Other Plotting functions in VCS
+^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+There are other ways to plot data in VCS. These additional plotting
+routines utilize the same parameter format as the plot() function. What
+makes these plotting functions unique is their direct association with
+the graphics methods. That is, each graphics method has its own plot
+function. For example, if the user wishes to plot data using the isofill
+graphics method, then the function isofill() can be used instead of the
+plot() function. If the isofill object is not specified then the default
+isofill graphics method will be used. The user can also pass down the
+name of the graphics method to be used. In some ways, the graphics
+method plot functions can be thought of as shortcuts to plotting data.
+
+Note that if a different graphics method object is specified and passed down
+to one of these alternate plot functions, then the alternate plot
+function will behave like the plot() function and plot the data in the
+specified graphics method format.
+
+See the table below for additional plot functions; a short example follows the table.
+
++--------------------+--------------------------------------------------+
+| Plot Function      | Description                                      |
++====================+==================================================+
+| ``boxfill()``      | plot data using the boxfill graphics method      |
++--------------------+--------------------------------------------------+
+| ``continents()``   | plot data using the continents graphics method   |
++--------------------+--------------------------------------------------+
+| ``isofill()``      | plot data using the isofill graphics method      |
++--------------------+--------------------------------------------------+
+| ``isoline()``      | plot data using the isoline graphics method      |
++--------------------+--------------------------------------------------+
+| ``outfill()``      | plot data using the outfill graphics method      |
++--------------------+--------------------------------------------------+
+| ``outline()``      | plot data using the outline graphics method      |
++--------------------+--------------------------------------------------+
+| ``scatter()``      | plot data using the scatter graphics method      |
++--------------------+--------------------------------------------------+
+| ``vector()``       | plot data using the vector graphics method       |
++--------------------+--------------------------------------------------+
+| ``xvsy()``         | plot data using the xvsy graphics method         |
++--------------------+--------------------------------------------------+
+| ``xyvsy()``        | plot data using the xyvsy graphics method        |
++--------------------+--------------------------------------------------+
+| ``yxvsy()``        | plot data using the yxvsy graphics method        |
++--------------------+--------------------------------------------------+
+| ``scalar3D()``     | plot data using the 3d\_scalar graphics method   |
++--------------------+--------------------------------------------------+
+| ``vector3D()``     | plot data using the 3d\_vector graphics method   |
++--------------------+--------------------------------------------------+
+
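+For example, the two calls below draw the same isofill plot (a minimal
+sketch; ``clt`` is assumed to have been read in as in the earlier plotting
+examples):
+
+.. code-block:: python
+
+    import vcs
+
+    canvas = vcs.init()
+    iso = canvas.getisofill('quick')
+
+    # The generic form, naming the graphics method explicitly
+    canvas.plot(clt, iso)
+
+    # The shortcut form: isofill() implies the isofill graphics method
+    canvas.isofill(clt, iso)
+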
+
+Creating VCS Objects
+^^^^^^^^^^^^^^^^^^^^
+
+The create functions enable the user to create VCS objects which can be
+modified directly to produce the desired results. Since the VCS
+"default" objects do not allow modifications, it is best to either create a
+new VCS object or get an existing one. When a VCS object is created, it
+is stored in an internal table for later use and/or recall.
+
+Create the following VCS objects (a short example follows the table):
+
++-------------------------------+---------------------------------------------------+
+| Create Function               | Description                                       |
++===============================+===================================================+
+| ``createboxfill()``           | creates a new boxfill graphics method object      |
++-------------------------------+---------------------------------------------------+
+| ``createcontinents()``        | creates a new continents graphics method object   |
++-------------------------------+---------------------------------------------------+
+| ``createfillarea()``          | creates a new fillarea secondary object           |
++-------------------------------+---------------------------------------------------+
+| ``createisofill()``           | creates a new isofill graphics method object      |
++-------------------------------+---------------------------------------------------+
+| ``createisoline()``           | creates a new isoline graphics method object      |
++-------------------------------+---------------------------------------------------+
+| ``createline()``              | creates a new line secondary object               |
++-------------------------------+---------------------------------------------------+
+| ``createmarker()``            | creates a new marker secondary object             |
++-------------------------------+---------------------------------------------------+
+| ``createoutfill()``           | creates a new outfill graphics method object      |
++-------------------------------+---------------------------------------------------+
+| ``createoutline()``           | creates a new outline graphics method object      |
++-------------------------------+---------------------------------------------------+
+| ``createscatter()``           | creates a new scatter graphics method object      |
++-------------------------------+---------------------------------------------------+
+| ``createtextcombined()``      | creates a new text-combined secondary object      |
++-------------------------------+---------------------------------------------------+
+| ``createtextorientation()``   | creates a new text-orientation secondary object   |
++-------------------------------+---------------------------------------------------+
+| ``createtexttable()``         | creates a new text-table secondary object         |
++-------------------------------+---------------------------------------------------+
+| ``createvector()``            | creates a new vector graphics method object       |
++-------------------------------+---------------------------------------------------+
+| ``createxvsy()``              | creates a new xvsy graphics method object         |
++-------------------------------+---------------------------------------------------+
+| ``createxyvsy()``             | creates a new xyvsy graphics method object        |
++-------------------------------+---------------------------------------------------+
+| ``createyxvsx()``             | creates a new yxvsx graphics method object        |
++-------------------------------+---------------------------------------------------+
+| ``create3d_scalar()``         | creates a new 3d\_scalar graphics method object   |
++-------------------------------+---------------------------------------------------+
+| ``create3d_vector()``         | creates a new 3d\_vector graphics method object   |
++-------------------------------+---------------------------------------------------+
+
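+A minimal sketch of creating and customizing an object before plotting
+(``level_1``/``level_2`` are standard boxfill attributes; ``clt`` is assumed
+to have been read in as in the earlier plotting examples):
+
+.. code-block:: python
+
+    import vcs
+
+    canvas = vcs.init()
+
+    # Create a modifiable copy instead of touching the 'default' object
+    box = canvas.createboxfill('myboxfill')
+    box.level_1 = 0      # lower end of the data range
+    box.level_2 = 100    # upper end of the data range
+    canvas.plot(clt, box)
+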
+
+Get Existing VCS Objects
+^^^^^^^^^^^^^^^^^^^^^^^^
+
+The get functions are used to obtain VCS objects that exist in the
+object memory tables. The get function directly manipulates the object's
+attributes in memory. If the object is used to display data on a plot
+and is manipulated by the user, then the plot will be automatically
+updated.
+
+Get the following VCS objects:
+
++----------------------------+--------------------------------------------------------------------------------------+
+| Get Function               | Description                                                                          |
++============================+======================================================================================+
+| ``getboxfill()``           | get specified boxfill graphics method and create boxfill object                      |
++----------------------------+--------------------------------------------------------------------------------------+
+| ``getcontinents()``        | get specified continents graphics method and create continents object                |
++----------------------------+--------------------------------------------------------------------------------------+
+| ``getfillarea()``          | get specified fillarea secondary object and create fillarea object                   |
++----------------------------+--------------------------------------------------------------------------------------+
+| ``getisofill()``           | get specified isofill graphics method and create isofill object                      |
++----------------------------+--------------------------------------------------------------------------------------+
+| ``getisoline()``           | get specified isoline graphics method and create isoline object                      |
++----------------------------+--------------------------------------------------------------------------------------+
+| ``getline()``              | get specified line secondary object and create line object                           |
++----------------------------+--------------------------------------------------------------------------------------+
+| ``getmarker()``            | get specified marker secondary object and create marker object                       |
++----------------------------+--------------------------------------------------------------------------------------+
+| ``getoutfill()``           | get specified outfill graphics method and create outfill object                      |
++----------------------------+--------------------------------------------------------------------------------------+
+| ``getoutline()``           | get specified outline graphics method and create outline object                      |
++----------------------------+--------------------------------------------------------------------------------------+
+| ``getscatter()``           | get specified scatter graphics method and create scatter object                      |
++----------------------------+--------------------------------------------------------------------------------------+
+| ``gettextcombined()``      | get specified text-combined secondary object and create text-combined object         |
++----------------------------+--------------------------------------------------------------------------------------+
+| ``gettextorientation()``   | get specified text-orientation secondary object and create text-orientation object   |
++----------------------------+--------------------------------------------------------------------------------------+
+| ``gettexttable()``         | get specified text-table secondary object and create text-table object               |
++----------------------------+--------------------------------------------------------------------------------------+
+| ``getvector()``            | get specified vector graphics method and create vector object                        |
++----------------------------+--------------------------------------------------------------------------------------+
+| ``getxvsy()``              | get specified xvsy graphics method and create xvsy object                            |
++----------------------------+--------------------------------------------------------------------------------------+
+| ``getxyvsy()``             | get specified xyvsy graphics method and create xyvsy object                          |
++----------------------------+--------------------------------------------------------------------------------------+
+| ``getyxvsx()``             | get specified yxvsx graphics method and create yxvsx object                          |
++----------------------------+--------------------------------------------------------------------------------------+
+| ``get3d_scalar()``         | get specified 3d\_scalar graphics method and create 3d\_scalar object                |
++----------------------------+--------------------------------------------------------------------------------------+
+| ``get3d_vector()``         | get specified 3d\_vector graphics method and create 3d\_vector object                |
++----------------------------+--------------------------------------------------------------------------------------+
+
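+For example (a minimal sketch; ``clt.nc`` and the variable name ``clt``
+are sample-data names used purely for illustration)::
+
+    import vcs
+    import cdms2
+
+    canvas = vcs.init()
+    data = cdms2.open('clt.nc')('clt')
+    iso = canvas.getisofill('default')
+    canvas.plot(data, iso)
+    # Because the plot above uses this object, manipulating it in
+    # memory updates the plot automatically
+    iso.levels = [0, 20, 40, 60, 80, 100]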
+
+Removing VCS Objects
+^^^^^^^^^^^^^^^^^^^^
+
+Unwanted VCS objects can be removed from internal memory with the remove
+function, which identifies the VCS object's type and removes it from the
+appropriate object table (see the sketch after the table).
+
+Remove VCS objects:
+
++----------------------+----------------------------------------------------------------------+
+| Remove               | Description                                                          |
++======================+======================================================================+
+| ``removeobject()``   | allows the user to remove objects from the appropriate object list   |
++----------------------+----------------------------------------------------------------------+
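+
+For example, a minimal sketch of removing a throwaway object
+(``'throwaway'`` is an arbitrary name chosen for illustration)::
+
+    import vcs
+
+    canvas = vcs.init()
+    box = canvas.createboxfill('throwaway')
+    # Remove 'throwaway' from the boxfill object table
+    canvas.removeobject(box)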
+
+Show VCS Object List
+^^^^^^^^^^^^^^^^^^^^
+
+The show function lists the VCS objects currently held in the object
+memory tables (see the example after the table):
+
++-----------------+----------------------------------------------------------+
+| Show Function   | Description                                              |
++=================+==========================================================+
+| ``show()``      | list VCS primary and secondary class objects in memory   |
++-----------------+----------------------------------------------------------+
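+
+For example, a minimal sketch::
+
+    import vcs
+
+    canvas = vcs.init()
+    # List all boxfill graphics methods currently in memory
+    canvas.show('boxfill')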
diff --git a/Packages/vcs/scripts/vcs_download_sample_data b/Packages/vcs/scripts/vcs_download_sample_data
new file mode 100755
index 0000000000000000000000000000000000000000..de3829e37fdb2dba4d783c03dece024974e33556
--- /dev/null
+++ b/Packages/vcs/scripts/vcs_download_sample_data
@@ -0,0 +1,4 @@
+#!/usr/bin/env python
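+# Download the sample data files used by VCS examples and tests
+# (listed in Share/sample_files.txt)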
+import vcs
+vcs.download_sample_data_files()
+
diff --git a/Packages/vcs/setup.py b/Packages/vcs/setup.py
index e3f9dd0229be8588b605b20ebd695280bf50c595..06f0ef5b9d0e88488efbcb00b26ba54592cce571 100755
--- a/Packages/vcs/setup.py
+++ b/Packages/vcs/setup.py
@@ -27,6 +27,7 @@ setup(name="vcs",
       packages=find_packages(),
       package_dir={'vcs': 'vcs',
                    },
+      scripts=["scripts/vcs_download_sample_data"],
       data_files=[('share/vcs', ('Share/wmo_symbols.json',
                                  'Share/data_continent_coarse',
                                  'Share/data_continent_political',
@@ -40,6 +41,7 @@ setup(name="vcs",
                                  'Share/text_icon.png',
                                  'Share/fill_icon.png',
                                  'Share/line_icon.png',
+                                 'Share/sample_files.txt',
                                  'Fonts/Adelon_Regular.ttf',
                                  'Fonts/Arabic.ttf',
                                  'Fonts/Athens_Greek.ttf',
diff --git a/Packages/vcs/vcs/Canvas.py b/Packages/vcs/vcs/Canvas.py
index 860f7858d82f7eb06be0c3e2179a595c45dac9ab..ca1564d54277219afce7cc921f66a79d4eb1a652 100644
--- a/Packages/vcs/vcs/Canvas.py
+++ b/Packages/vcs/vcs/Canvas.py
@@ -1,33 +1,4 @@
-#!/usr/bin/env python
-# Adapted for numpy/ma/cdms2 by convertcdms.py
-#
-# The VCS Canvas API controls -  canvas module
-#
-###############################################################################
-#                                                                             #
-# Module:       canvas module                                                 #
-#                                                                             #
-# Copyright:    "See file Legal.htm for copyright information."               #
-#                                                                             #
-# Authors:      PCMDI Software Team                                           #
-#               Lawrence Livermore National Laboratory:                       #
-#               support@pcmdi.llnl.gov                                        #
-#                                                                             #
-# Description:  PCMDI's VCS Canvas is used to display plots and to create and #
-#               run animations.  It is always visible on the screen in a      #
-#               landscape (width exceeding height), portrait (height exceeding#
-#               width), or full-screen mode.                                  #
-#                                                                             #
-# Version: 2.4                                                          #
-#                                                                             #
-###############################################################################
-
-"""Canvas: the class representing a vcs drawing window
-Normally, created by vcs.init()
-Contains the method plot.
-"""
 import warnings
-from pauser import pause
 import numpy.ma
 import MV2
 import numpy
@@ -71,7 +42,10 @@ canvas_closed = 0
 import vcsaddons  # noqa
 import vcs.manageElements  # noqa
 import configurator  # noqa
-from projection import round_projections  # noqa
+from projection import no_deformation_projections  # noqa
+
+# Python < 3 ignores DeprecationWarning by default; re-enable it (shown once per location)
+warnings.simplefilter('default')
 
 
 class SIGNAL(object):
@@ -307,22 +281,12 @@ def _process_keyword(obj, target, source, keyargs, default=None):
 
 
 class Canvas(object):
-
     """
- Function: Canvas                     # Construct a VCS Canvas class Object
-
- Description of Function:
-    Construct the VCS Canas object. There can only be at most 8 VCS
-    Canvases open at any given time.
+    The object onto which all plots are drawn.
 
- Example of Use:
-    a=vcs.Canvas()                    # This examples constructs a VCS Canvas
-"""
-    ##########################################################################
-    #                                                                           #
-    # Set attributes for VCS Canvas Class (i.e., set VCS Canvas Mode).          #
-    #                                                                           #
-    ##########################################################################
+    Usually created using `vcs.init`, this object provides easy access
+    to the functionality of the entire VCS module.
+    """
     __slots__ = [
         '_mode',
         '_pause_time',
@@ -330,9 +294,7 @@ class Canvas(object):
         '_worldcoordinate',
         '_winfo_id',
         '_varglist',
-        '_canvas_gui',
         '_animate_info',
-        '_canvas_template_editor',
         '_isplottinggridded',
         '_user_actions_names',
         '_user_actions',
@@ -345,7 +307,6 @@ class Canvas(object):
         'worldcoordinate',
         'winfo_id',
         'varglist',
-        'canvas_gui'
         'animate_info',
         'canvas_template_editor',
         'isplottinggridded',
@@ -355,7 +316,6 @@ class Canvas(object):
         'user_actions_names',
         'user_actions',
         'size',
-        'canvas_guianimate_info',
         'ParameterChanged',
         'colormap',
         'backgroundcolor',
@@ -442,13 +402,6 @@ class Canvas(object):
         return self._varglist
     varglist = property(_getvarglist, _setvarglist)
 
-    def _setcanvas_gui(self, value):
-        self._canvas_gui = value
-
-    def _getcanvas_gui(self):
-        return self._canvas_gui
-    canvas_gui = property(_getcanvas_gui, _setcanvas_gui)
-
     def _setcanvas(self, value):
         raise vcsError("Error, canvas is not an attribute you can set")
 
@@ -495,15 +448,6 @@ class Canvas(object):
         return self._worldcoordinate
     worldcoordinate = property(_getworldcoordinate, _setworldcoordinate)
 
-    def _setcanvas_template_editor(self, value):
-        self._canvas_template_editor = value  # No check on this!
-
-    def _getcanvas_template_editor(self):
-        return self._canvas_template_editor
-    canvas_template_editor = property(
-        _getcanvas_template_editor,
-        _setcanvas_template_editor)
-
     def _setisplottinggridded(self, value):
         if not isinstance(value, bool):
             raise vcsError("isplottinggridded must be boolean")
@@ -831,34 +775,23 @@ class Canvas(object):
 #        tv = self._datawc_tv( tv, arglist )
         return tv
 
-    ##########################################################################
-    #                                                                           #
-    # Print out the object's doc string.                                        #
-    #                                                                           #
-    ##########################################################################
     def objecthelp(self, *arg):
         """
- Function: objecthelp               # Print out the object's doc string
-
- Description of Function:
     Print out information on the VCS object. See example below on its use.
 
- Example of Use:
-    a=vcs.init()
+    :Example:
 
-    ln=a.getline('red')                 # Get a VCS line object
-    a.objecthelp(ln)                    # This will print out information on how to use ln
+::
+
+    a=vcs.init()
+    # Get a VCS line object
+    ln=a.getline('red')
+    # This will print out information on how to use ln
+    a.objecthelp(ln)
     """
         for x in arg:
             print getattr(x, "__doc__", "")
 
-    ##########################################################################
-    #                                                                           #
-    # Initialize the VCS Canvas and set the Canvas mode to 0. Because the mode  #
-    # is set to 0, the user will have to manually update the VCS Canvas by      #
-    # using the "update" function.                                              #
-    #                                                                           #
-    ##########################################################################
     def __init__(self, mode=1, pause_time=0,
                  call_from_gui=0, size=None, backend="vtk", geometry=None, bg=None):
         self._canvas_id = vcs.next_canvas_id
@@ -883,9 +816,7 @@ class Canvas(object):
 
         self.winfo_id = -99
         self.varglist = []
-        self.canvas_gui = None
         self.isplottinggridded = False
-        self.canvas_guianimate_info = None
 
         if size is None:
             psize = 1.2941176470588236
@@ -961,12 +892,10 @@ class Canvas(object):
 
 # Initial.attributes is being called in main.c, so it is not needed here!
 # Actually it is for taylordiagram graphic methods....
-##########################################################################
-#  Okay, then this is redundant since it is done in main.c. When time perments, put the   #
-#  taylordiagram graphic methods attributes in main.c Because this is here we must check  #
-#  to make sure that the initial attributes file is called only once for normalization    #
-#  purposes....                                                                           #
-##########################################################################
+#  Okay, then this is redundant since it is done in main.c. When time permits, put the
+#  taylordiagram graphic methods attributes in main.c. Because this is here, we must check
+#  to make sure that the initial attributes file is called only once for normalization
+#  purposes....
 
         self.canvas_template_editor = None
         self.ratio = '0'
@@ -996,26 +925,25 @@ class Canvas(object):
 
     # Functions to set/querie drawing of UV-CDAT logo
     def drawlogoon(self):
-        """Turn on drawing of logo on pix"""
+        """Show UV-CDAT logo on the canvas"""
         self.enableLogo = True
 
     def drawlogooff(self):
-        """Turn off drawing of logo on pix"""
+        """Hide UV-CDAT logo on the canvas"""
         self.enableLogo = False
 
     def getdrawlogo(self):
-        """Return value of draw logo"""
+        """
+        Returns the value of the draw-logo flag
+
+        :returns: Boolean indicating whether the logo will be drawn
+        :rtype: bool
+        """
         return self.enableLogo
 
     def initLogoDrawing(self):
         self.drawLogo = self.enableLogo
 
-    ##########################################################################
-    #                                                                           #
-    # Update wrapper function for VCS.                                          #
-    #                                                                           #
-    ##########################################################################
-
     def update(self, *args, **kargs):
         """
  Function: update                   # Update the VCS Canvas.
@@ -1025,7 +953,7 @@ class Canvas(object):
     set to manual, then use this function to update the plot(s)
     manually.
 
- Example of Use:
+ :Example:
     ...
 
     a=vcs.init()
@@ -1043,25 +971,6 @@ class Canvas(object):
 
         return self.backend.update(*args, **kargs)
 
-    ##########################################################################
-    #                                                                           #
-    # Update wrapper function for VCS with a check to update the continents.    #
-    #                                                                           #
-    ##########################################################################
-    def _update_continents_check(self, *args):
-
-        a = self.canvas.updatecanvas_continents(*args)
-        self.flush()  # update the canvas by processing all the X events
-        self.backing_store()
-        pause(self.pause_time)
-
-        return a
-
-    ##########################################################################
-    #                                                                           #
-    # Script VCS primary or secondary elements wrapper functions for VCS.       #
-    #                                                                           #
-    ##########################################################################
     def scriptobject(self, obj, script_filename=None, mode=None):
         """
  Function: scriptobject       # Script a single primary or secondary class object
@@ -1083,7 +992,7 @@ class Canvas(object):
           However, a `default' attribute set that has been copied under a
           different name can be saved as a script file.
 
- Example of Use:
+ :Example:
     a=vcs.init()
     l=a.getline('red')         # To Modify an existing line object
     i=x.createisoline('dean')  # Create an instance of default isoline object
@@ -1136,13 +1045,14 @@ class Canvas(object):
         else:
             print 'This is not a template, graphics method or secondary method object.'
 
-    ##########################################################################
-    #                                                                           #
-    # Remove VCS primary and secondary methods wrapper functions for VCS.       #
-    #                                                                           #
-    ##########################################################################
-
     def removeobject(self, obj):
+        """
+        Removes a VCS object from the cache of available objects that can be referred to by name.
+
+        :param obj: The VCS object to be removed.
+        :type obj: a VCS object
+        :returns: ???
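+
+        :Example:
+
+::
+
+    a=vcs.init()
+    # Get an existing line object, then remove it from the object table
+    ln=a.getline('red')
+    a.removeobject(ln)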
+        """
         __doc__ = vcs.removeobject.__doc__  # noqa
         return vcs.removeobject(obj)
 
@@ -1150,18 +1060,23 @@ class Canvas(object):
         return vcs.removeP(*args)
 
     def clean_auto_generated_objects(self, type=None):
-        """ cleans all self/auto genrated objects in vcs, only if they're not in use
-        Example:
-        import vcs
-        x=vcs.init()
-        x.clean_auto_generated_objects() # cleans everything
-        x.clean_auto_generated_objects('template') # cleans template objects
+        """
+        Cleans up all automatically generated VCS objects.
+
+        This function will delete all references to objects that
+        VCS created automatically in response to user actions but are
+        no longer in use. This shouldn't be necessary most of the time,
+        but if you're running into performance/memory issues, calling it
+        periodically may help.
+
+        :param type: Type of objects to remove. By default, will remove everything.
+        :type type: None, str, list/tuple (of str)
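+
+    :Example:
+
+::
+
+    import vcs
+    x=vcs.init()
+    # cleans everything
+    x.clean_auto_generated_objects()
+    # cleans only template objects
+    x.clean_auto_generated_objects('template')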
         """
 
         if type is None:
             type = self.listelements()
             type.remove("fontNumber")
-        elif isinstance(type, str):
+        elif isinstance(type, (str, unicode)):
             type = [type, ]
         elif not isinstance(type, (list, tuple)):
             return
@@ -1189,12 +1104,8 @@ class Canvas(object):
 
     def check_name_source(self, name, source, typ):
         return vcs.check_name_source(name, source, typ)
+    check_name_source.__doc__ = vcs.manageElements.check_name_source.__doc__
 
-    ##########################################################################
-    #                                                                           #
-    # Template functions for VCS.                                               #
-    #                                                                           #
-    ##########################################################################
     def createtemplate(self, name=None, source='default'):
         return vcs.createtemplate(name, source)
     createtemplate.__doc__ = vcs.manageElements.createtemplate.__doc__
@@ -1203,11 +1114,6 @@ class Canvas(object):
         return vcs.gettemplate(Pt_name_src)
     gettemplate.__doc__ = vcs.manageElements.gettemplate.__doc__
 
-    ##########################################################################
-    #                                                                           #
-    # Projection functions for VCS.                                             #
-    #                                                                           #
-    ##########################################################################
     def createprojection(self, name=None, source='default'):
         return vcs.createprojection(name, source)
     createprojection.__doc__ = vcs.manageElements.createprojection.__doc__
@@ -1216,11 +1122,6 @@ class Canvas(object):
         return vcs.getprojection(Proj_name_src)
     getprojection.__doc__ = vcs.manageElements.getprojection.__doc__
 
-    ##########################################################################
-    #                                                                           #
-    # Boxfill functions for VCS.                                                #
-    #                                                                           #
-    ##########################################################################
     def createboxfill(self, name=None, source='default'):
         return vcs.createboxfill(name, source)
     createboxfill.__doc__ = vcs.manageElements.createboxfill.__doc__
@@ -1231,59 +1132,54 @@ class Canvas(object):
 
     def boxfill(self, *args, **parms):
         """
-Options:::
-%s
-%s
-%s
-:::
- Input:::
-%s
-    :::
- Output:::
-%s
-    :::
+        Plot a boxfill.
 
- Function: boxfill                        # Generate a boxfill plot
+        Generate a boxfill plot given the data, boxfill graphics method, and
+        template. If no boxfill class object is given, then the 'default' boxfill
+        graphics method is used. Similarly, if no template class object is given,
+        then the 'default' template is used.
 
- Description of Function:
-    Generate a boxfill plot given the data, boxfill graphics method, and
-    template. If no boxfill class object is given, then the 'default' boxfill
-    graphics method is used. Similarly, if no template class object is given,
-    then the 'default' template is used.
+        :Example:
+
+::
 
- Example of Use:
     a=vcs.init()
-    a.show('boxfill')                        # Show all the existing boxfill graphics methods
-    box=a.getboxfill('quick')                # Create instance of 'quick'
-    a.boxfill(array,box)                # Plot array using specified box and default
-                                        #         template
-    templt=a.gettemplate('AMIP')        # Create an instance of template 'AMIP'
-    a.clear()                           # Clear VCS canvas
-    a.boxfill(array,box,template)       # Plot array using specified box and template
-    a.boxfill(box,array,template)       # Plot array using specified box and template
-    a.boxfill(template,array,box)       # Plot array using specified box and template
-    a.boxfill(template,array,box)       # Plot array using specified box and template
-    a.boxfill(array,'AMIP','quick')     # Use 'AMIP' template and 'quick' boxfill
-    a.boxfill('AMIP',array,'quick')     # Use 'AMIP' template and 'quick' boxfill
-    a.boxfill('AMIP','quick',array)     # Use 'AMIP' template and 'quick' boxfill
-
-###################################################################################################################
-###########################################                         ###############################################
-########################################## End boxfill Description ################################################
-#########################################                         #################################################
-###################################################################################################################
+    # Show all the existing boxfill graphics methods
+    a.show('boxfill')
+    # Create instance of 'quick'
+    box=a.getboxfill('quick')
+    # Plot array using specified box and default template
+    a.boxfill(array,box)
+    # Create an instance of template 'AMIP'
+    templt=a.gettemplate('AMIP')
+    # Clear VCS canvas
+    a.clear()
+    # Plot array using specified box and template
+    a.boxfill(array,box,template)
+    # Plot array using specified box and template
+    a.boxfill(box,array,template)
+    # Plot array using specified box and template
+    a.boxfill(template,array,box)
+    # Use 'AMIP' template and 'quick' boxfill
+    a.boxfill(array,'AMIP','quick')
+    # Use 'AMIP' template and 'quick' boxfill
+    a.boxfill('AMIP',array,'quick')
+    # Use 'AMIP' template and 'quick' boxfill
+    a.boxfill('AMIP','quick',array)
 
+%s
+%s
+%s
+%s
+%s
 """
         arglist = _determine_arg_list('boxfill', args)
         return self.__plot(arglist, parms)
     boxfill.__doc__ = boxfill.__doc__ % (
         plot_keywords_doc, graphics_method_core, axesconvert, plot_2D_input, plot_output)
 
-    ##########################################################################
-    #                                                                           #
-    # Taylordiagram functions for VCS.                                          #
-    #                                                                           #
-    ##########################################################################
     def createtaylordiagram(self, name=None, source='default'):
         return vcs.createtaylordiagram(name, source)
     createtaylordiagram.__doc__ = vcs.manageElements.createtaylordiagram.__doc__
@@ -1294,32 +1190,32 @@ Options:::
 
     def taylordiagram(self, *args, **parms):
         """
- Function: taylordiagram                        # Generate an taylordiagram plot
+        Generate a taylor diagram plot.
 
- Description of Function:
-    Generate a taylordiagram plot given the data, taylordiagram graphics method, and
-    template. If no taylordiagram class object is given, then the 'default' taylordiagram
-    graphics method is used. Similarly, if no template class object is given,
-    then the 'default' template is used.
+        Generate a taylordiagram plot given the data, taylordiagram graphics method, and
+        template. If no taylordiagram class object is given, then the 'default' taylordiagram
+        graphics method is used. Similarly, if no template class object is given,
+        then the 'default' template is used.
+
+        :Example:
+
+::
 
- Example of Use:
     a=vcs.init()
-    a.show('taylordiagram')                   # Show all the existing taylordiagram graphics methods
-    td=a.gettaylordiagram()                   # Create instance of 'default'
-    a.taylordiagram(array,td)                 # Plot array using specified iso and default
-                                              #       template
-    a.clear()                                 # Clear VCS canvas
-    a.taylordiagram(array,td,template)        # Plot array using specified iso and template
+    # Show all the existing taylordiagram graphics methods
+    a.show('taylordiagram')
+    # Create instance of 'default'
+    td=a.gettaylordiagram()
+    # Plot array using specified td and default template
+    a.taylordiagram(array,td)
+    # Clear VCS canvas
+    a.clear()
+    # Plot array using specified td and template
+    a.taylordiagram(array,td,template)
 """
         arglist = _determine_arg_list('taylordiagram', args)
         return self.__plot(arglist, parms)
 
-    ##########################################################################
-    #                                                                           #
-    # Meshfill functions for VCS.                                               #
-    #                                                                           #
-    ##########################################################################
-
     def createmeshfill(self, name=None, source='default'):
         return vcs.createmeshfill(name, source)
     createmeshfill.__doc__ = vcs.manageElements.createmeshfill.__doc__
@@ -1330,9 +1226,6 @@ Options:::
 
     def meshfill(self, *args, **parms):  # noqa
         """
- Function: meshfill               # Generate an meshfill plot
-
- Description of Function:
     Generate a meshfill plot given the data, the mesh, a meshfill graphics method, and
     a template. If no meshfill class object is given, then the 'default' meshfill
     graphics method is used. Similarly, if no template class object is given,
@@ -1352,25 +1245,28 @@ Options:::
     data.shape=(10000,)
     mesh.shape=(10000,2,4)
 
- Example of Use:
+    :Example:
+
+::
+
     a=vcs.init()
-    a.show('meshfill')                   # Show all the existing meshfill graphics methods
-    mesh=a.getmeshfill()                 # Create instance of 'default'
-    a.meshfill(array,mesh)               # Plot array using specified mesh and default
-                                         #       template
-    a.clear()                            # Clear VCS canvas
+    # Show all the existing meshfill graphics methods
+    a.show('meshfill')
+    # Create instance of 'default'
+    mesh=a.getmeshfill()
+    # Plot array using specified mesh and default template
+    a.meshfill(array,mesh)
+    # Clear VCS canvas
+    a.clear()
+    # Plot array using specified mesh, meshfill graphic method, and template
     a.meshfill(array,mesh,mesh_graphic_method,template)
-                           # Plot array using specified mesh mesh graphic method and template
+
+:returns: ???
+:rtype: ???
 """
         arglist = _determine_arg_list('meshfill', args)
         return self.__plot(arglist, parms)
 
-    ##########################################################################
-    #                                                                           #
-    # DV3D functions for VCS.                                                #
-    #                                                                           #
-    ##########################################################################
-
     def create3d_scalar(self, name=None, source='default'):
         return vcs.create3d_scalar(name, source)
 
@@ -1410,11 +1306,6 @@ Options:::
         arglist = _determine_arg_list('3d_dual_scalar', args)
         return self.__plot(arglist, parms)
 
-    ##########################################################################
-    #                                                                           #
-    # Isofill functions for VCS.                                                #
-    #                                                                           #
-    ##########################################################################
     def createisofill(self, name=None, source='default'):
         return vcs.createisofill(name, source)
     createisofill.__doc__ = vcs.manageElements.createisofill.__doc__
@@ -1437,28 +1328,29 @@ Options:::
 %s
     :::
 
- Function: isofill                        # Generate an isofill plot
-
- Description of Function:
     Generate a isofill plot given the data, isofill graphics method, and
     template. If no isofill class object is given, then the 'default' isofill
     graphics method is used. Similarly, if no template class object is given,
     then the 'default' template is used.
 
- Example of Use:
+    :Example:
+
+::
+
     a=vcs.init()
-    a.show('isofill')                   # Show all the existing isofill graphics methods
-    iso=a.getisofill('quick')           # Create instance of 'quick'
-    a.isofill(array,iso)                # Plot array using specified iso and default
-                                        #       template
-    a.clear()                           # Clear VCS canvas
-    a.isofill(array,iso,template)       # Plot array using specified iso and template
+    # Show all the existing isofill graphics methods
+    a.show('isofill')
+    # Create instance of 'quick'
+    iso=a.getisofill('quick')
+    # Plot array using specified iso and default template
+    a.isofill(array,iso)
+    # Clear VCS canvas
+    a.clear()
+    # Plot array using specified iso and template
+    a.isofill(array,iso,template)
 
-###################################################################################################################
-###########################################                         ###############################################
-########################################## End isofill Description ################################################
-#########################################                         #################################################
-###################################################################################################################
+:returns: ???
+:rtype: ???
 
 """
         arglist = _determine_arg_list('isofill', args)
@@ -1466,11 +1358,6 @@ Options:::
     isofill.__doc__ = isofill.__doc__ % (
         plot_keywords_doc, graphics_method_core, axesconvert, plot_2D_input, plot_output)
 
-    ##########################################################################
-    #                                                                           #
-    # Isoline functions for VCS.                                                #
-    #                                                                           #
-    ##########################################################################
     def createisoline(self, name=None, source='default'):
         return vcs.createisoline(name, source)
     createisoline.__doc__ = vcs.manageElements.createisoline.__doc__
@@ -1493,15 +1380,15 @@ Options:::
 %s
     :::
 
- Function: isoline                        # Generate an isoline plot
-
- Description of Function:
     Generate a isoline plot given the data, isoline graphics method, and
     template. If no isoline class object is given, then the 'default' isoline
     graphics method is used. Similarly, if no template class object is given,
     then the 'default' template is used.
 
- Example of Use:
+    :Example:
+
+::
+
     a=vcs.init()
     a.show('isoline')                   # Show all the existing isoline graphics methods
     iso=a.getisoline('quick')           # Create instance of 'quick'
@@ -1510,11 +1397,8 @@ Options:::
     a.clear()                           # Clear VCS canvas
     a.isoline(array,iso,template)       # Plot array using specified iso and template
 
-###################################################################################################################
-###########################################                         ###############################################
-########################################## End isoline Description ################################################
-#########################################                         #################################################
-###################################################################################################################
+:returns: ???
+:rtype: ???
 
 """
         arglist = _determine_arg_list('isoline', args)
@@ -1530,11 +1414,6 @@ Options:::
         return vcs.get1d(name)
     create1d.__doc__ = vcs.manageElements.create1d.__doc__
 
-    ##########################################################################
-    #                                                                           #
-    # Xyvsy functions for VCS.                                                  #
-    #                                                                           #
-    ##########################################################################
     def createxyvsy(self, name=None, source='default'):
         return vcs.createxyvsy(name, source)
     createxyvsy.__doc__ = vcs.manageElements.createxyvsy.__doc__
@@ -1557,40 +1436,35 @@ Options:::
 %s
     :::
 
- Function: xyvsy                        # Generate a Xyvsy plot
-
- Description of Function:
     Generate a Xyvsy plot given the data, Xyvsy graphics method, and
     template. If no Xyvsy class object is given, then the 'default' Xyvsy
     graphics method is used. Similarly, if no template class object is given,
     then the 'default' template is used.
 
- Example of Use:
-    a=vcs.init()
-    a.show('xyvsy')                   # Show all the existing Xyvsy graphics methods
-    xyy=a.getxyvsy('quick')           # Create instance of 'quick'
-    a.xyvsy(array,xyy)                # Plot array using specified xyy and default
-                                        #       template
-    a.clear()                           # Clear VCS canvas
-    a.xyvsy(array,xyy,template)       # Plot array using specified xyy and template
+    :Example:
+
+::
 
-#################################################################################################################
-###########################################                       ###############################################
-########################################## End xyvsy Description ################################################
-#########################################                       #################################################
-#################################################################################################################
+    a=vcs.init()
+    # Show all the existing Xyvsy graphics methods
+    a.show('xyvsy')
+    # Create instance of 'quick'
+    xyy=a.getxyvsy('quick')
+    # Plot array using specified xyy and default template
+    a.xyvsy(array,xyy)
+    # Clear VCS canvas
+    a.clear()
+    # Plot array using specified xyy and template
+    a.xyvsy(array,xyy,template)
 
+:returns:
+:rtype:
 """
         arglist = _determine_arg_list('xyvsy', args)
         return self.__plot(arglist, parms)
     xyvsy.__doc__ = xyvsy.__doc__ % (
         plot_keywords_doc, graphics_method_core, xaxisconvert, plot_1D_input, plot_output)
 
-    ##########################################################################
-    #                                                                           #
-    # Yxvsx functions for VCS.                                                  #
-    #                                                                           #
-    ##########################################################################
     def createyxvsx(self, name=None, source='default'):
         return vcs.createyxvsx(name, source)
     createyxvsx.__doc__ = vcs.manageElements.createyxvsx.__doc__
@@ -1613,40 +1487,35 @@ Options:::
 %s
     :::
 
- Function: yxvsx                        # Generate a Yxvsx plot
-
- Description of Function:
     Generate a Yxvsx plot given the data, Yxvsx graphics method, and
     template. If no Yxvsx class object is given, then the 'default' Yxvsx
     graphics method is used. Similarly, if no template class object is given,
     then the 'default' template is used.
 
- Example of Use:
+    :Example:
+
+::
+
     a=vcs.init()
-    a.show('yxvsx')                   # Show all the existing Yxvsx graphics methods
-    yxx=a.getyxvsx('quick')           # Create instance of 'quick'
-    a.yxvsx(array,yxx)                # Plot array using specified yxx and default
-                                      #       template
-    a.clear()                         # Clear VCS canvas
-    a.yxvsx(array,yxx,template)       # Plot array using specified yxx and template
-
-#################################################################################################################
-###########################################                       ###############################################
-########################################## End yxvsx Description ################################################
-#########################################                       #################################################
-#################################################################################################################
+    # Show all the existing Yxvsx graphics methods
+    a.show('yxvsx')
+    # Create instance of 'quick'
+    yxx=a.getyxvsx('quick')
+    # Plot array using specified yxx and default template
+    a.yxvsx(array,yxx)
+    # Clear VCS canvas
+    a.clear()
+    # Plot array using specified yxx and template
+    a.yxvsx(array,yxx,template)
 
+:returns:
+:rtype:
 """
         arglist = _determine_arg_list('yxvsx', args)
         return self.__plot(arglist, parms)
     yxvsx.__doc__ = yxvsx.__doc__ % (
         plot_keywords_doc, graphics_method_core, xaxisconvert, plot_1D_input, plot_output)
 
-    ##########################################################################
-    #                                                                           #
-    # XvsY functions for VCS.                                                   #
-    #                                                                           #
-    ##########################################################################
     def createxvsy(self, name=None, source='default'):
         return vcs.createxvsy(name, source)
     createxvsy.__doc__ = vcs.manageElements.createxvsy.__doc__
@@ -1667,29 +1536,29 @@ Options:::
 %s
     :::
 
- Function: xvsy                      # Generate a XvsY plot
-
- Description of Function:
     Generate a XvsY plot given the data, XvsY graphics method, and
     template. If no XvsY class object is given, then the 'default' XvsY
     graphics method is used. Similarly, if no template class object is given,
     then the 'default' template is used.
 
- Example of Use:
+    :Example:
+
+::
+
     a=vcs.init()
-    a.show('xvsy')                   # Show all the existing XvsY graphics methods
-    xy=a.getxvsy('quick')            # Create instance of 'quick'
-    a.xvsy(array,xy)                 # Plot array using specified xy and default
-                                     #       template
-    a.clear()                        # Clear VCS canvas
-    a.xvsy(array,xy,template)        # Plot array using specified xy and template
-
-#################################################################################################################
-###########################################                       ###############################################
-########################################## End xvsy Description ################################################
-#########################################                       #################################################
-#################################################################################################################
+    # Show all the existing XvsY graphics methods
+    a.show('xvsy')
+    # Create instance of 'quick'
+    xy=a.getxvsy('quick')
+    # Plot array using specified xy and default template
+    a.xvsy(array,xy)
+    # Clear VCS canvas
+    a.clear()
+    # Plot array using specified xy and template
+    a.xvsy(array,xy,template)
 
+:returns:
+:rtype:
 """
         arglist = _determine_arg_list('xvsy', args)
         return self.__plot(arglist, parms)
@@ -1699,11 +1568,6 @@ Options:::
                                    plot_2_1D_input,
                                    plot_output)
 
-    ##########################################################################
-    #                                                                           #
-    # Vector functions for VCS.                                                 #
-    #                                                                           #
-    ##########################################################################
     def createvector(self, name=None, source='default'):
         return vcs.createvector(name, source)
     createvector.__doc__ = vcs.manageElements.createvector.__doc__
@@ -1714,31 +1578,30 @@ Options:::
 
     def vector(self, *args, **parms):
         """
- Function: vector                      # Generate a vector plot
-
- Description of Function:
     Generate a vector plot given the data, vector graphics method, and
     template. If no vector class object is given, then the 'default' vector
     graphics method is used. Similarly, if no template class object is given,
     then the 'default' template is used.
 
- Example of Use:
+    :Example:
+
+::
+
     a=vcs.init()
-    a.show('vector')                   # Show all the existing vector graphics methods
-    vec=a.getvector('quick')           # Create instance of 'quick'
-    a.vector(array,vec)                # Plot array using specified vec and default
-                                        #       template
-    a.clear()                           # Clear VCS canvas
-    a.vector(array,vec,template)       # Plot array using specified vec and template
+    # Show all the existing vector graphics methods
+    a.show('vector')
+    # Create instance of 'quick'
+    vec=a.getvector('quick')
+    # Plot array using specified vec and default template
+    a.vector(array,vec)
+    # Clear VCS canvas
+    a.clear()
+    # Plot array using specified vec and template
+    a.vector(array,vec,template)
 """
         arglist = _determine_arg_list('vector', args)
         return self.__plot(arglist, parms)
 
-    ##########################################################################
-    #                                                                           #
-    # Scatter functions for VCS.                                                #
-    #                                                                           #
-    ##########################################################################
     def createscatter(self, name=None, source='default'):
         return vcs.createscatter(name, source)
     createscatter.__doc__ = vcs.manageElements.createscatter.__doc__
@@ -1758,29 +1621,27 @@ Options:::
  Output:::
 %s
     :::
-
- Function: scatter                      # Generate a scatter plot
-
- Description of Function:
     Generate a scatter plot given the data, scatter graphics method, and
     template. If no scatter class object is given, then the 'default' scatter
     graphics method is used. Similarly, if no template class object is given,
     then the 'default' template is used.
 
- Example of Use:
+    :Example:
+
+::
+
     a=vcs.init()
-    a.show('scatter')                   # Show all the existing scatter graphics methods
-    sct=a.getscatter('quick')           # Create instance of 'quick'
-    a.scatter(array,sct)                # Plot array using specified sct and default
-                                        #       template
-    a.clear()                           # Clear VCS canvas
-    a.scatter(array,sct,template)       # Plot array using specified sct and template
+    # Show all the existing scatter graphics methods
+    a.show('scatter')
+    # Create instance of 'quick'
+    sct=a.getscatter('quick')
+    # Plot array using specified sct and default template
+    a.scatter(array,sct)
+    # Clear VCS canvas
+    a.clear()
+    # Plot array using specified sct and template
+    a.scatter(array,sct,template)
 
-###################################################################################################################
-###########################################                         ###############################################
-########################################## End scatter Description ################################################
-#########################################                         #################################################
-###################################################################################################################
 
 """
 
@@ -1789,11 +1650,6 @@ Options:::
     scatter.__doc__ = scatter.__doc__ % (
         plot_keywords_doc, graphics_method_core, axesconvert, plot_2_1D_input, plot_output)
 
-    ##########################################################################
-    #                                                                           #
-    # Line  functions for VCS.                                                  #
-    #                                                                           #
-    ##########################################################################
     def createline(self, name=None, source='default', ltype=None,  # noqa
                    width=None, color=None, priority=None,
                    viewport=None, worldcoordinate=None,
@@ -1812,22 +1668,31 @@ Options:::
 
     def line(self, *args, **parms):
         """
- Function: line                           # Generate a line plot
-
- Description of Function:
     Plot a line segment on the VCS Canvas. If no line class
     object is given, then an error will be returned.
 
- Example of Use:
+    :Example:
+
+::
+
     a=vcs.init()
-    a.show('line')                      # Show all the existing line objects
-    ln=a.getline('red')                 # Create instance of 'red'
-    ln.width=4                          # Set the line width
-    ln.color = 242                      # Set the line color
-    ln.type = 4                         # Set the line type
-    ln.x=[[0.0,2.0,2.0,0.0,0.0], [0.5,1.5]] # Set the x value points
-    ln.y=[[0.0,0.0,2.0,2.0,0.0], [1.0,1.0]] # Set the y value points
-    a.line(ln)                          # Plot using specified line object
+    # Show all the existing line objects
+    a.show('line')
+    # Create instance of 'red'
+    ln=a.getline('red')
+    # Set the line width
+    ln.width=4
+    # Set the line color
+    ln.color = 242
+    # Set the line type
+    ln.type = 4
+    # Set the x value points
+    ln.x=[[0.0,2.0,2.0,0.0,0.0], [0.5,1.5]]
+    # Set the y value points
+    ln.y=[[0.0,0.0,2.0,2.0,0.0], [1.0,1.0]]
+    # Plot using specified line object
+    a.line(ln)
+
 """
         arglist = _determine_arg_list('line', args)
         return self.__plot(arglist, parms)
@@ -1837,20 +1702,58 @@ Options:::
                  worldcoordinate=[0.0, 1.0, 0.0, 1.0],
                  x=None, y=None, projection='default', bg=0):
         """
- Function: drawline                           # Generate and draw a line plot
-
- Description of Function:
     Generate and draw a line object on the VCS Canvas.
 
- Example of Use:
+    :Example:
+
+::
+
     a=vcs.init()
-    a.show('line')                      # Show all the existing line objects
+    # Show all the existing line objects
+    a.show('line')
+    # Create instance of line object 'red'
     ln=a.drawline(name='red', ltype='dash', width=2,
-                  color=242, priority=1, viewport=[0, 2.0, 0, 2.0],
+                  color=242, priority=1, viewport=[0, 1.0, 0, 1.0],
                   worldcoordinate=[0,100, 0,50],
                   x=[0,20,40,60,80,100],
-                  y=[0,10,20,30,40,50] )      # Create instance of line object 'red'
-    a.line(ln)                          # Plot using specified line object
+                  y=[0,10,20,30,40,50] )
+    # Plot using specified line object
+    a.line(ln)
+
+
+:param name: Name of created object
+:type name: str
+
+:param ltype: One of "dash", "dash-dot", "solid", "dot", or "long-dash".
+:type ltype: str
+
+:param width: Thickness of the line to be drawn
+:type width: int
+
+:param color: A color name from the `X11 Color Names list <https://en.wikipedia.org/wiki/X11_color_names>`_,
+              or an integer value from 0-255, or an RGB/RGBA tuple/list (e.g. (0,100,0), (100,100,0,50))
+:type color: str, int, or tuple/list of ints
+
+:param priority: The layer on which the line will be drawn.
+:type priority: int
+
+:param viewport: 4 floats between 0 and 1. These specify the area that the X/Y values are mapped to inside of the canvas
+:type viewport: list of floats
+
+:param worldcoordinate: List of 4 floats (xmin, xmax, ymin, ymax)
+:type worldcoordinate: list of floats
+
+:param x: List of lists of x coordinates. Values must be between worldcoordinate[0] and worldcoordinate[1].
+:type x: list of floats, or list of lists of floats
+
+:param y: List of lists of y coordinates. Values must be between worldcoordinate[2] and worldcoordinate[3].
+:type y: list of floats, or list of lists of floats
+
+:param projection: Specify a geographic projection used to convert x/y from spherical coordinates into 2D coordinates.
+:type projection: str or projection object
+
+:returns: ???
+:rtype: ???
 """
         if (name is None) or (not isinstance(name, str)):
             raise vcsError('Must provide string name for the line.')
@@ -1873,11 +1776,6 @@ Options:::
 
         return ln
 
-    ##########################################################################
-    #                                                                           #
-    # Marker  functions for VCS.                                                #
-    #                                                                           #
-    ##########################################################################
     def createmarker(self, name=None, source='default', mtype=None,  # noqa
                      size=None, color=None, priority=1,
                      viewport=None, worldcoordinate=None,
@@ -1896,23 +1794,31 @@ Options:::
 
     def marker(self, *args, **parms):
         """
- Function: marker                           # Generate a marker plot
-
- Description of Function:
-    Plot a marker segment on the Vcs Canvas. If no marker class
-    object is given, then an error will be returned.
+        Plot a marker on the VCS Canvas. If no marker class
+        object is given, then an error will be returned.
 
- Example of Use:
-    a=vcs.init()
-    a.show('marker')                     # Show all the existing marker objects
-    mrk=a.getmarker('red')               # Create instance of 'red'
-    mrk.size=4                           # Set the marker size
-    mrk.color = 242                      # Set the marker color
-    mrk.type = 4                         # Set the marker type
-    mrk.x=[[0.0,2.0,2.0,0.0,0.0], [0.5,1.5]] # Set the x value points
-    mrk.y=[[0.0,0.0,2.0,2.0,0.0], [1.0,1.0]] # Set the y value points
-    a.marker(mrk)                          # Plot using specified marker object
-"""
+        :Example:
+
+::
+
+    a=vcs.init()
+    # Show all the existing marker objects
+    a.show('marker')
+    # Create instance of 'red'
+    mrk=a.getmarker('red')
+    # Set the marker size
+    mrk.size=4
+    # Set the marker color
+    mrk.color = 242
+    # Set the marker type
+    mrk.type = 4
+    # Set the x value points
+    mrk.x=[[0.0,2.0,2.0,0.0,0.0], [0.5,1.5]]
+    # Set the y value points
+    mrk.y=[[0.0,0.0,2.0,2.0,0.0], [1.0,1.0]]
+    # Plot using specified marker object
+    a.marker(mrk)
+
+:returns: A VCS displayplot object
+:rtype: vcs.displayplot.Dp
+        """
         arglist = _determine_arg_list('marker', args)
         return self.__plot(arglist, parms)
 
@@ -1921,20 +1827,55 @@ Options:::
                    worldcoordinate=[0.0, 1.0, 0.0, 1.0],
                    x=None, y=None, bg=0):
         """
- Function: drawmarker                           # Generate and draw a marker plot
+        Generate and draw a marker object on the VCS Canvas.
 
- Description of Function:
-    Generate and draw a marker object on the VCS Canvas.
+        :Example:
+
+::
 
- Example of Use:
     a=vcs.init()
-    a.show('marker')                      # Show all the existing marker objects
-    mrk=a.drawmarker(name='red', mtype='dash', size=2,
-                  color=242, priority=1, viewport=[0, 2.0, 0, 2.0],
+    # Show all the existing marker objects
+    a.show('marker')
+    # Create instance of marker object 'red'
+    mrk=a.drawmarker(name='red', mtype='dot', size=2,
+                  color=242, priority=1, viewport=[0, 1.0, 0, 1.0],
                   worldcoordinate=[0,100, 0,50],
                   x=[0,20,40,60,80,100],
-                  y=[0,10,20,30,40,50] )      # Create instance of marker object 'red'
-    a.marker(mrk)                          # Plot using specified marker object
+                  y=[0,10,20,30,40,50] )
+    # Plot using specified marker object
+    a.marker(mrk)
+
+
+:param name: Name of created object
+:type name: str
+
+:param mtype: Marker type, e.g. 'dot', 'plus', 'star', etc.
+:type mtype: str
+
+:param size: Size of the marker to draw
+:type size: int
+
+:param color: A color name from the `X11 Color Names list <https://en.wikipedia.org/wiki/X11_color_names>`_,
+              or an integer value from 0-255, or an RGB/RGBA tuple/list (e.g. (0,100,0), (100,100,0,50))
+:type color: str or int or tuple/list
+
+:param priority: The layer on which the marker will be drawn.
+:type priority: int
+
+:param viewport: 4 floats between 0 and 1. These specify the area that the X/Y values are mapped to inside of the canvas.
+:type viewport: list of floats
+
+:param worldcoordinate: List of 4 floats (xmin, xmax, ymin, ymax)
+:type worldcoordinate: list of floats
+
+:param x: List of lists of x coordinates. Values must be between worldcoordinate[0] and worldcoordinate[1].
+:type x: list of floats
+
+:param y: List of lists of y coordinates. Values must be between worldcoordinate[2] and worldcoordinate[3].
+:type y: list of floats
+
+:returns: A marker object
+:rtype: vcs.marker.Tm
 """
         if (name is None) or (not isinstance(name, str)):
             raise vcsError('Must provide string name for the marker.')
@@ -1956,11 +1897,6 @@ Options:::
 
         return mrk
 
-    ##########################################################################
-    #                                                                           #
-    # Fillarea  functions for VCS.                                              #
-    #                                                                           #
-    ##########################################################################
     def createfillarea(self, name=None, source='default', style=None,
                        index=None, color=None, priority=1,
                        viewport=None, worldcoordinate=None,
@@ -1980,23 +1916,35 @@ Options:::
 
     def fillarea(self, *args, **parms):
         """
- Function: fillarea                           # Generate a fillarea plot
+    Generate a fillarea plot
 
- Description of Function:
     Plot a fillarea segment on the Vcs Canvas. If no fillarea class
     object is given, then an error will be returned.
 
- Example of Use:
+    :Example:
+
+::
+
     a=vcs.init()
-    a.show('fillarea')                  # Show all the existing fillarea objects
-    fa=a.getfillarea('red')             # Create instance of 'red'
-    fa.style=1                          # Set the fillarea style
-    fa.index=4                          # Set the fillarea index
-    fa.color = 242                      # Set the fillarea color
-    fa.type = 4                         # Set the fillarea type
-    fa.x=[[0.0,2.0,2.0,0.0,0.0], [0.5,1.5]] # Set the x value points
-    fa.y=[[0.0,0.0,2.0,2.0,0.0], [1.0,1.0]] # Set the y value points
-    a.fillarea(fa)                          # Plot using specified fillarea object
+    # Show all the existing fillarea objects
+    a.show('fillarea')
+    # Create instance of 'red'
+    fa=a.createfillarea('red')
+    # Set the fillarea style
+    fa.style=1
+    # Set the fillarea index
+    fa.index=4
+    # Set the fillarea color
+    fa.color = 'black'
+    # Set the x value points
+    fa.x=[[0.0,2.0,2.0,0.0,0.0], [0.5,1.5]]
+    # Set the y value points
+    fa.y=[[0.0,0.0,2.0,2.0,0.0], [1.0,1.0]]
+    # Plot using specified fillarea object
+    a.fillarea(fa)
+
+:returns: A VCS displayplot object
+:rtype: vcs.displayplot.Dp
 """
         arglist = _determine_arg_list('fillarea', args)
         return self.__plot(arglist, parms)
@@ -2006,20 +1954,58 @@ Options:::
                      worldcoordinate=[0.0, 1.0, 0.0, 1.0],
                      x=None, y=None, bg=0):
         """
- Function: drawfillarea                           # Generate and draw a fillarea plot
+        Generate and draw a fillarea object on the VCS Canvas.
 
- Description of Function:
-    Generate and draw a fillarea object on the VCS Canvas.
+        :Example:
+
+::
 
- Example of Use:
     a=vcs.init()
-    a.show('fillarea')                      # Show all the existing fillarea objects
-    fa=a.drawfillarea(name='red', mtype='dash', size=2,
-                  color=242, priority=1, viewport=[0, 2.0, 0, 2.0],
+    # Show all the existing fillarea objects
+    a.show('fillarea')
+    # Create instance of fillarea object 'red'
+    fa=a.drawfillarea(name='red', style=1, color=242,
+                  priority=1, viewport=[0, 1.0, 0, 1.0],
                   worldcoordinate=[0,100, 0,50],
                   x=[0,20,40,60,80,100],
-                  y=[0,10,20,30,40,50] )      # Create instance of fillarea object 'red'
-    a.fillarea(fa)                          # Plot using specified fillarea object
+                  y=[0,10,20,30,40,50], bg=0 )
+    # Plot using specified fillarea object
+    a.fillarea(fa)
+
+
+:param name: Name of created object
+:type name: str
+
+:param style: One of "hatch", "solid", or "pattern".
+:type style: str
+
+:param index: Specifies which `pattern <http://uvcdat.llnl.gov/gallery/fullsize/pattern_chart.png>`_
+              to fill the fillarea with. Accepts ints from 1-20.
+:type index: int
+
+:param color: A color name from the `X11 Color Names list <https://en.wikipedia.org/wiki/X11_color_names>`_,
+              or an integer value from 0-255, or an RGB/RGBA tuple/list (e.g. (0,100,0), (100,100,0,50))
+:type color: str or int or tuple/list
+
+:param priority: The layer on which the fillarea will be drawn.
+:type priority: int
+
+:param viewport: 4 floats between 0 and 1. These specify the area that the X/Y values are mapped to inside of the canvas.
+:type viewport: list of floats
+
+:param worldcoordinate: List of 4 floats (xmin, xmax, ymin, ymax)
+:type worldcoordinate: list of floats
+
+:param x: List of lists of x coordinates. Values must be between worldcoordinate[0] and worldcoordinate[1].
+:type x: list of floats
+
+:param y: List of lists of y coordinates. Values must be between worldcoordinate[2] and worldcoordinate[3].
+:type y: list of floats
+
+:param bg: Boolean value. True => object drawn in background (not shown on canvas). False => object shown on canvas.
+:type bg: bool
+
+:returns: A fillarea object
+:rtype: vcs.fillarea.Tf
 """
         if (name is None) or (not isinstance(name, str)):
             raise vcsError('Must provide string name for the fillarea.')
@@ -2041,11 +2027,6 @@ Options:::
 
         return fa
 
-    ##########################################################################
-    #                                                                           #
-    # Text Table  functions for VCS.                                            #
-    #                                                                           #
-    ##########################################################################
     def createtexttable(self, name=None, source='default', font=None,
                         spacing=None, expansion=None, color=None, priority=None,
                         viewport=None, worldcoordinate=None,
@@ -2063,11 +2044,6 @@ Options:::
                                 viewport, worldcoordinate, x, y)
     gettexttable.__doc__ = vcs.manageElements.gettexttable.__doc__
 
-    ##########################################################################
-    #                                                                           #
-    # Text Orientation  functions for VCS.                                      #
-    #                                                                           #
-    ##########################################################################
     def createtextorientation(self, name=None, source='default'):
         return vcs.createtextorientation(name, source)
     createtextorientation.__doc__ = vcs.manageElements.createtextorientation.__doc__
@@ -2076,11 +2052,6 @@ Options:::
         return vcs.gettextorientation(To_name_src)
     gettextorientation.__doc__ = vcs.manageElements.gettextorientation.__doc__
 
-    ##########################################################################
-    #                                                                           #
-    # Text Combined  functions for VCS.                                         #
-    #                                                                           #
-    ##########################################################################
     def createtextcombined(self, Tt_name=None, Tt_source='default', To_name=None, To_source='default',  # noqa
                            font=None, spacing=None, expansion=None, color=None,
                            priority=None, viewport=None, worldcoordinate=None, x=None, y=None,
@@ -2108,28 +2079,42 @@ Options:::
 
     def textcombined(self, *args, **parms):
         """
-        Function: text or textcombined         # Generate a textcombined plot
+        Generate a textcombined plot
 
-        Description of Function:
         Plot a textcombined segment on the Vcs Canvas. If no textcombined class
         object is given, then an error will be returned.
+        This function can also be called using the alias ``text(*args, **parms)``.
+
+        :Example:
+
+::
 
-        Example of Use:
         a=vcs.init()
-        a.show('texttable')                 # Show all the existing texttable objects
-        a.show('textorientation')           # Show all the existing textorientation objects
-        tt=a.gettext('std','7left')         # Create instance of 'std' and '7left'
-        tt.string = 'Text1'                 # Show the string "Text1" on the VCS Canvas
-        tt.font=2                           # Set the text size
-        tt.color = 242                      # Set the text color
-        tt.angle = 45                       # Set the text angle
-        tt.x=[[0.0,2.0,2.0,0.0,0.0], [0.5,1.5]] # Set the x value points
-        tt.y=[[0.0,0.0,2.0,2.0,0.0], [1.0,1.0]] # Set the y value points
-        a.text(tt)                          # Plot using specified text object
-
-        Optionally you can pass a string, the coordinates and any keyword
-        Example:
-        x.plot('Hi',.5,.5,color=241,angle=45)
+        # Show all the existing texttable objects
+        a.show('texttable')
+        # Show all the existing textorientation objects
+        a.show('textorientation')
+        # Create instance of 'std' and '7left'
+        tt=a.gettext('std','7left')
+        # Show the string "Text1" on the VCS Canvas
+        tt.string = 'Text1'
+        # Set the text size
+        tt.font=2
+        # Set the text color
+        tt.color = 242
+        # Set the text angle
+        tt.angle = 45
+        # Set the x value points
+        tt.x=[[0.0,2.0,2.0,0.0,0.0], [0.5,1.5]]
+        # Set the y value points
+        tt.y=[[0.0,0.0,2.0,2.0,0.0], [1.0,1.0]]
+        # Plot using specified text object
+        a.text(tt)
+        #Optionally you can pass a string, the coordinates, and any keyword
+        a.plot('Hi',.5,.5,color=241,angle=45)
+
+:returns: A VCS displayplot object
+:rtype: vcs.displayplot.Dp
         """
         # First check if color is a string
         if 'color' in parms.keys():
@@ -2164,7 +2149,11 @@ Options:::
 
     def gettextextent(self, textobject):
         """Returns the coordinate of the box surrounding a text object once printed
-        Example:
+
+        :Example:
+
+::
+
         x=vcs.init()
         t=x.createtext()
         t.x=[.5]
@@ -2172,6 +2161,13 @@ Options:::
         t.string=['Hello World']
         extent = x.gettextextent(t)
         print extent
+
+:param textobject: A VCS text object
+:type textobject: vcs.textcombined.Tc
+
+:returns: The coordinates of the box surrounding the text object once plotted
+:rtype: list
+
         """
         if not vcs.istext(textobject):
             raise vcsError('You must pass a text object')
@@ -2190,20 +2186,59 @@ Options:::
                          worldcoordinate=[0.0, 1.0, 0.0, 1.0],
                          x=None, y=None, bg=0):
         """
- Function: drawtexttable                           # Generate and draw a texttable plot
+    Generate and draw a textcombined object on the VCS Canvas.
 
- Description of Function:
-    Generate and draw a texttable object on the VCS Canvas.
+    :Example:
+
+::
 
- Example of Use:
     a=vcs.init()
-    a.show('texttable')                      # Show all the existing texttable objects
-    tt=a.drawtexttable(Tt_name = 'red', To_name='7left', mtype='dash', size=2,
-                  color=242, priority=1, viewport=[0, 2.0, 0, 2.0],
+    # Show all the existing texttable objects
+    a.show('texttable')
+    # Create instance of textcombined object 'red'
+    tc=a.drawtextcombined(Tt_name = 'red', To_name='7left',
+                  color=242, priority=1, viewport=[0, 1.0, 0, 1.0],
                   worldcoordinate=[0,100, 0,50],
                   x=[0,20,40,60,80,100],
-                  y=[0,10,20,30,40,50] )      # Create instance of texttable object 'red'
-    a.texttable(tt)                          # Plot using specified texttable object
+                  y=[0,10,20,30,40,50] )
+    # Plot using specified textcombined object
+    a.textcombined(tc)
+
+
+:param Tt_name: Name of the texttable object to create
+:type Tt_name: str
+
+:param To_name: Name of the textorientation object to use
+:type To_name: str
+
+:param color: A color name from the `X11 Color Names list <https://en.wikipedia.org/wiki/X11_color_names>`_,
+              or an integer value from 0-255, or an RGB/RGBA tuple/list (e.g. (0,100,0), (100,100,0,50))
+:type color: str or int or tuple/list
+
+:param priority: The layer on which the text will be drawn.
+:type priority: int
+
+:param viewport: 4 floats between 0 and 1. These specify the area that the X/Y values are mapped to inside of the canvas.
+:type viewport: list of floats
+
+:param worldcoordinate: List of 4 floats (xmin, xmax, ymin, ymax)
+:type worldcoordinate: list of floats
+
+:param x: List of lists of x coordinates. Values must be between worldcoordinate[0] and worldcoordinate[1].
+:type x: list of floats
+
+:param y: List of lists of y coordinates. Values must be between worldcoordinate[2] and worldcoordinate[3].
+:type y: list of floats
+
+:param bg: Boolean value. True => object drawn in background (not shown on canvas). False => object shown on canvas.
+:type bg: bool
+
+:returns: A textcombined object
+:rtype: vcs.textcombined.Tc
 """
         if (Tt_name is None) or (not isinstance(Tt_name, str)):
             raise vcsError('Must provide string name for the texttable.')
@@ -2244,24 +2279,19 @@ Options:::
     # Set alias for the secondary drawtextcombined.
     drawtext = drawtextcombined
 
-    _plot_keywords_ = ['variable', 'grid', 'xaxis', 'yaxis', 'xrev', 'yrev', 'continents', 'xarray', 'yarray',
-                       'name', 'time', 'units', 'ymd', 'file_comment',
-                       'xbounds', 'ybounds', 'xname', 'yname', 'xunits', 'yunits', 'xweights', 'yweights',
-                       'comment1', 'comment2', 'comment3', 'comment4', 'hms', 'long_name', 'zaxis',
-                       'zarray', 'zname', 'zunits', 'taxis', 'tarray', 'tname', 'tunits', 'waxis', 'warray',
-                       'wname', 'wunits', 'bg', 'ratio', 'donotstoredisplay', 'render', 'continents_line']
+    _plot_keywords_ = ['variable', 'grid', 'xaxis', 'xarray',  'xrev', 'yaxis', 'yarray', 'yrev', 'continents',
+                       'xbounds', 'ybounds', 'zaxis', 'zarray', 'taxis', 'tarray', 'waxis', 'warray', 'bg', 'ratio',
+                       'donotstoredisplay', 'render', 'continents_line', "display_name"]
 
+    _deprecated_plot_keywords_ = ["time", "units", "file_comment", "xname", "yname", "zname", "tname", "wname",
+                                  "xunits", "yunits", "zunits", "tunits", "wunits", "comment1", "comment2", "comment3",
+                                  "comment4", "long_name"]
     # def replot(self):
     #    """ Clears and plots with last used plot arguments
     #    """
     #    self.clear()
     #    self.plot(*self.__last_plot_actual_args, **self.__last_plot_keyargs)
 
-    ###########################################################################
-    #                                                                         #
-    # Plot wrapper for VCS.                                                   #
-    #                                                                         #
-    ###########################################################################
     def plot(self, *actual_args, **keyargs):
         """
 Options:::
@@ -2275,9 +2305,6 @@ Options:::
 %s
     :::
 
- Function: plot
-
- Description of plot:
     Plot an array(s) of data given a template and graphics method. The VCS template is
     used to define where the data and variable attributes will be displayed on the VCS
     Canvas. The VCS graphics method is used to define how the array(s) will be shown
@@ -2297,28 +2324,10 @@ Options:::
             '3',3: y dim will be 3 times bigger than x dim (restricted to original template.data area)
             Adding a 't' at the end of the ratio makes the tickmarks and boxes move along.
 
-    Variable attribute keys:
-       comment1         = string   #Comment plotted above file_comment
-       comment2         = string   #Comment plotted above comment1
-       comment3         = string   #Comment plotted above comment2
-       comment4         = string   #Comment plotted above comment4
-       file_comment     = string   #Comment (defaults to file.comment)
-       hms              = string (hh:mm:ss) #Hour, minute, second
-       long_name        = string   #Descriptive variable name
-       name             = string   #Variable name (defaults to var.id)
-       time             = cdtime   #instance (relative or absolute),
-                                    cdtime, reltime or abstime value
-       units            = string   #Variable units
-       ymd              = string (yy/mm/dd) #Year, month, day
-
     Dimension attribute keys (dimension length=n):
       [x|y|z|t|w]array = NumPy array of length n    # x or y Dimension values
        [x|y]bounds       = NumPy array of shape (n,2) # x or y Dimension boundaries
-       [x|y|z|t|w]name   = string                     # x or y Dimension name
-       [x|y|z|t|w]units  = string                     # x or y Dimension units
-       [x|y]weights      = NumPy array of length n    # x or y Dimension weights (used to
-                                                        calculate area-weighted mean)
 
     CDMS object:
        [x|y|z|t|w]axis   = CDMS axis object           # x or y Axis
@@ -2328,7 +2337,7 @@ Options:::
     Other:
        [x|y]rev         = 0|1                         # if ==1, reverse the direction of the x
                                                              or y axis
-       continents	= 0,1,2,3,4,5,6,7,8,9,10,11   #	if >=1, plot continental outlines
+       continents   = 0,1,2,3,4,5,6,7,8,9,10,11   # if >=1, plot continental outlines
                                                              (default: plot if xaxis is
                                                              longitude, yaxis is latitude -or-
                                                              xname is 'longitude' and yname is
@@ -2345,7 +2354,17 @@ Options:::
                                                       # Values 6 through 11 signify the line type
                                                       # defined by the files data_continent_other7
                                                       # through data_continent_other12.
-
+       continents_line = vcs.getline("default")   # VCS line object defining the continents' appearance
+       donotstoredisplay = True|False             # Whether to store the displayplot object generated
+                                                  # by this plot
+       render = True|False                        # Whether to actually render the plot (useful when
+                                                  # doing a bunch of plots in a row)
+       display_name = "__display_123"             # VCS display plot name (used to prevent duplicate
+                                                  # display plots)
+       ratio = 1.5|"autot"|"auto"                 # Ratio of height/width for the plot; "autot" and
+                                                  # "auto" will choose a "good" ratio for you
+       plot_based_dual_grid = True|False          # Plot the actual grid or the dual grid, based on what
+                                                  # the plot needs: isofill, isoline, and vector need
+                                                  # point attributes; boxfill and meshfill need cell
+                                                  # attributes. Defaults to True if not specified.
+
     Graphics Output in Background Mode:
        bg                 = 0|1   # if ==1, create images in the background
                                                              (Don't display the VCS Canvas)
@@ -2361,24 +2380,31 @@ Options:::
     if both 'xaxis' and 'grid' keywords are specified, the value of 'xaxis' takes precedence
     over the x-axis of grid.
 
- Example of Use:
-    x=vcs.init()        # x is an instance of the VCS class object (constructor)
-    x.plot(array)       # this call will use default settings for template and boxfill
-    x.plot(array, 'AMIP', 'isofill','AMIP_psl') # this is specifying the template and
-                                                  graphics method
-    t=x.gettemplate('AMIP')        # get a predefined the template 'AMIP'
-    vec=x.getvector('quick')       # get a predefined the vector graphics method 'quick'
-    x.plot(array1, array2, t, vec) # plot the data as a vector using the 'AMIP' template
-    x.clear()                      # clear the VCS Canvas of all plots
-    box=x.createboxfill('new')     # create boxfill graphics method 'new'
-    x.plot(box,t,array)            # plot array data using box 'new' and template 't'
-
-###############################################################################################################
-###########################################                      ##############################################
-########################################## End plot Description ###############################################
-#########################################                      ################################################
-###############################################################################################################
-
+    :Example:
+
+::
+
+    # x is an instance of the VCS class object (constructor)
+    x=vcs.init()
+    # this call will use default settings for template and boxfill
+    x.plot(array)
+    # this is specifying the template and graphics method
+    x.plot(array, 'AMIP', 'isofill','AMIP_psl')
+    # get the predefined template 'AMIP'
+    t=x.gettemplate('AMIP')
+    # get the predefined vector graphics method 'quick'
+    vec=x.getvector('quick')
+    # plot the data as a vector using the 'AMIP' template
+    x.plot(array1, array2, t, vec)
+    # clear the VCS Canvas of all plots
+    x.clear()
+    # create boxfill graphics method 'new'
+    box=x.createboxfill('new')
+    # plot array data using box 'new' and template 't'
+    x.plot(box,t,array)
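+    # keyword options from the list above can be passed directly; for instance,
+    # fix the height/width ratio and skip storing the display plot object
+    x.plot(array, ratio="autot", donotstoredisplay=True)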
+
+:returns: A VCS displayplot object
+:rtype: vcs.displayplot.Dp
 """
         self.__last_plot_actual_args = actual_args
         self.__last_plot_keyargs = keyargs
@@ -3464,8 +3490,13 @@ Options:::
                     "unknown taylordiagram graphic method: %s" %
                     arglist[4])
             t.plot(arglist[0], canvas=self, template=arglist[2], **keyargs)
-            nm, src = self.check_name_source(None, "default", "display")
-            dn = displayplot.Dp(nm)
+
+            dname = keyargs.get("display_name")
+            if dname is not None:
+                dn = vcs.elements["display"][dname]
+            else:
+                nm, src = self.check_name_source(None, "default", "display")
+                dn = displayplot.Dp(nm)
             dn.continents = self.getcontinentstype()
             dn.continents_line = self.getcontinentsline()
             dn.template = arglist[2]
@@ -3473,7 +3504,6 @@ Options:::
             dn.g_name = arglist[4]
             dn.array = arglist[:2]
             dn.extradisplays = t.displays
-# dn.array=arglist[0]
             for p in slab_changed_attributes.keys():
                 tmp = slab_changed_attributes[p]
                 if tmp == (None, None):
@@ -3498,15 +3528,18 @@ Options:::
                 if hasattr(gm, "priority") and gm.priority == 0:
                     return
             p = self.getprojection(gm.projection)
-            if p.type in round_projections and (
+            if p.type in no_deformation_projections and (
                     doratio == "0" or doratio[:4] == "auto"):
                 doratio = "1t"
             for keyarg in keyargs.keys():
-                if keyarg not in self.__class__._plot_keywords_ + \
-                        self.backend._plot_keywords:
-                    warnings.warn(
-                        'Unrecognized vcs plot keyword: %s, assuming backend (%s) keyword' %
-                        (keyarg, self.backend.type))
+                if keyarg not in self.__class__._plot_keywords_ + self.backend._plot_keywords:
+                    if keyarg in self.__class__._deprecated_plot_keywords_:
+                        warnings.warn("Deprecation Warning: Keyword '%s' will be removed in the next version"
+                                      "of UV-CDAT." % keyarg)
+                    else:
+                        warnings.warn(
+                            'Unrecognized vcs plot keyword: %s, assuming backend (%s) keyword' %
+                            (keyarg, self.backend.type))
 
             if arglist[0] is not None or 'variable' in keyargs:
                 arglist[0] = self._reconstruct_tv(arglist, keyargs)
@@ -3552,7 +3585,7 @@ Options:::
                 t.data.y2 = p.viewport[3]
 
                 proj = self.getprojection(p.projection)
-                if proj.type in round_projections and (
+                if proj.type in no_deformation_projections and (
                         doratio == "0" or doratio[:4] == "auto"):
                     doratio = "1t"
 
@@ -3608,7 +3641,7 @@ Options:::
                         tp = "textcombined"
                     gm = vcs.elements[tp][arglist[4]]
                 p = self.getprojection(gm.projection)
-                if p.type in round_projections:
+                if p.type in no_deformation_projections:
                     doratio = "1t"
                 if p.type == 'linear':
                     if gm.g_name == 'Gfm':
@@ -3725,9 +3758,13 @@ Options:::
             else:
                 returned_kargs = self.backend.plot(*arglist, **keyargs)
                 if not keyargs.get("donotstoredisplay", False):
-                    nm, src = self.check_name_source(
-                        None, "default", "display")
-                    dn = displayplot.Dp(nm, parent=self)
+                    dname = keyargs.get("display_name")
+                    if dname is not None:
+                        dn = vcs.elements['display'][dname]
+                    else:
+                        nm, src = self.check_name_source(
+                            None, "default", "display")
+                        dn = displayplot.Dp(nm, parent=self)
                     dn.template = arglist[2]
                     dn.g_type = arglist[3]
                     dn.g_name = arglist[4]
@@ -3795,81 +3832,46 @@ Options:::
     def setAnimationStepper(self, stepper):
         self.backend.setAnimationStepper(stepper)
 
-    ##########################################################################
-    #                                                                           #
-    # VCS utility wrapper to return the number of displays that are "ON".       #
-    #                                                                           #
-    ##########################################################################
-    def return_display_ON_num(self, *args):
-        return self.canvas.return_display_ON_num(*args)
-
-    ##########################################################################
-    #                                                                           #
-    # VCS utility wrapper to return the current display names.                  #
-    #                                                                           #
-    ##########################################################################
     def return_display_names(self, *args):
         return self.display_names
 
-    ##########################################################################
-    #                                                                           #
-    # VCS utility wrapper to remove the display names.                          #
-    #                                                                           #
-    ##########################################################################
     def remove_display_name(self, *args):
-        return self.canvas.remove_display_name(*args)
-
-    ##########################################################################
-    #                                                                           #
-    # CGM  wrapper for VCS.                                                     #
-    #                                                                           #
-    ##########################################################################
-    def cgm(self, file, mode='w'):
         """
- Function: cgm
+        Removes a plotted item from the canvas.
 
- Description of Function:
-    To save a graphics plot in CDAT the user can call CGM along with the name of
-    the output. This routine will save the displayed image on the VCS canvas as
-    a binary vector graphics that can be imported into MSWord or Framemaker. CGM
-    files are in ISO standards output format.
+        :param args: Any number of display names to remove.
+        :type args: str
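+
+        :Example:
+
+::
+
+    a=vcs.init()
+    # `array` is a placeholder variable; the display object's `name` attribute is
+    # assumed to hold the name under which it was registered
+    d = a.plot(array)
+    a.remove_display_name(d.name)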
+        """
+        for a in args:
+            if a in self.display_names:
+                self.display_names.remove(a)
+        self.update()
 
-    The CGM command is used to create or append to a cgm file. There are two modes
-    for saving a cgm file: `Append' mode (a) appends cgm output to an existing cgm
-    file; `Replace' (r) mode overwrites an existing cgm file with new cgm output.
-    The default mode is to overwrite an existing cgm file (i.e. mode (r)).
+    def cgm(self, file, mode='w'):
+        """
+        Export an image in CGM format.
 
- Example of Use:
-    a=vcs.init()
-    a.plot(array,'default','isofill','quick')
-    a.cgm(o)
-    a.cgm('example')           # by default a cgm file will overwrite an existing file
-    a.cgm('example','w')  # 'r' will instruct cgm to overwrite an existing file
-    a.cgm('example',mode='w')  # 'r' will instruct cgm to overwrite an existing file
+        :param file: Output filename
+        :type file: str
+
+        :param mode: Deprecated; only 'w' (overwrite) is supported. Any other value is ignored with a warning.
+        :type mode: str
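+
+        :Example:
+
+::
+
+    a=vcs.init()
+    a.plot(array,'default','isofill','quick')
+    # save the current canvas as a CGM file (as in the pre-rewrite example)
+    a.cgm('example')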
+        """
 
-"""
         if mode != 'w':
             warnings.warn(
                 "cgm only supports 'w' mode ignoring your mode ('%s')" %
                 mode)
         return self.backend.cgm(file)
 
-    ##########################################################################
-    #                                                                           #
-    # Clear VCS Canvas wrapper for VCS.                                         #
-    #                                                                           #
-    ##########################################################################
     def clear(self, *args, **kargs):
         """
- Function: clear
+        Clears all the VCS displays on a page (i.e., the VCS Canvas object).
 
- Description of Function:
-    In VCS it is necessary to clear all the plots from a page. This routine
-    will clear all the VCS displays on a page (i.e., the VCS Canvas object).
+        :Example:
+
+::
 
- Example of Use:
     a=vcs.init()
     a.plot(array,'default','isofill','quick')
+    # clear VCS displays from the page
     a.clear()
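+    # optionally keep the display plot objects in vcs.elements
+    # (uses the `preserve_display` keyword introduced in this change)
+    a.clear(preserve_display=True)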
 
 """
@@ -3879,6 +3881,10 @@ Options:::
             self.configurator.stop_animating()
         self.animate_info = []
         self.animate.update_animate_display_list()
+
+        preserve_display = kargs.get("preserve_display", False)
+        if "preserve_display" in kargs:
+            del kargs["preserve_display"]
         self.backend.clear(*args, **kargs)
         for nm in self.display_names:
             # Let's look at elements created by display production
@@ -3893,26 +3899,23 @@ Options:::
                     for k in new_elts[e]:
                         if k in vcs.elements[e].keys():
                             del(vcs.elements[e][k])
-            del(vcs.elements["display"][nm])
+            if not preserve_display:
+                del(vcs.elements["display"][nm])
         self.display_names = []
         return
 
-    ##########################################################################
-    #                                                                           #
-    # Close VCS Canvas wrapper for VCS.                                         #
-    #                                                                           #
-    ##########################################################################
     def close(self, *args, **kargs):
         """
- Function: close
+        Close the VCS Canvas. It will not deallocate the VCS Canvas object.
+        To deallocate the VCS Canvas, use the destroy method.
 
- Description of Function:
-    Close the VCS Canvas. It will not deallocate the VCS Canvas object.
-    To deallocate the VCS Canvas, use the destroy method.
+        :Example:
+
+::
 
- Example of Use:
     a=vcs.init()
     a.plot(array,'default','isofill','quick')
+    # close the VCS Canvas
     a.close()
 
 """
@@ -3923,19 +3926,14 @@ Options:::
 
         return a
 
-    ##########################################################################
-    #                                                                           #
-    # Destroy VCS Canvas Object (i.e., call the Dealloc C code).      		#
-    #                                                                           #
-    ##########################################################################
     def destroy(self):
         """
- Function: destroy
-
- Description of Function:
     Destroy the VCS Canvas. It will deallocate the VCS Canvas object.
 
- Example of Use:
+    :Example:
+
+::
+
     a=vcs.init()
     a.plot(array,'default','isofill','quick')
     a.destroy()
@@ -3947,136 +3945,42 @@ Options:::
         gc.garbage
         gc.collect()
 
-    ##########################################################################
-    #                                                                           #
-    # Graphics Method Change display.                                           #
-    #                                                                           #
-    ##########################################################################
     def change_display_graphic_method(self, display, type, name):
         '''
- Function: change_display_graphic_method
-
- Description of Function:
-    Changes the type and graphic metohd of a display.
+        Changes the type and graphic method of a plot.
+
+        :param display: Display to change.
+        :param type: New graphics method type.
+        :param name: Name of new graphics method.
+        :type display: str or vcs.displayplot.Dp
+        :type name: str
+        :type type: str
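+
+        :Example:
+
+::
+
+    a=vcs.init()
+    # `array` is a placeholder for any plottable variable
+    d = a.plot(array)
+    # switch the resulting display to the 'default' isofill graphics method
+    a.change_display_graphic_method(d, 'isofill', 'default')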
+        '''
 
-'''
-        return self.canvas.change_display_graphic_method(
-            *(display, type, name))
-    ##########################################################################
-    #                                                                           #
-    # Figures out which display is selected in graphic method editor mode       #
-    #                                                                           #
-    ##########################################################################
+        if isinstance(display, (str, unicode)):
+            display = vcs.elements["display"][display]
+        display.g_type = type
+        display.g_name = name
+        self.update()
 
     def get_selected_display(self):
         """
- Function: get_selected_display
+        Deprecated. Does not work.
 
     """
         return self.canvas.get_selected_display(*())
 
-    ##########################################################################
-    #                                                                           #
-    # Send a request to turn on a picture template object in the VCS Canvas.    #
-    #                                                                           #
-    ##########################################################################
-    def _select_one(self, template_name, attr_name, X1, X2, Y1, Y2):
-        # flush and block the X main loop
-
-        self.canvas._select_one(template_name, attr_name, X1, X2, Y1, Y2)
-
-    ##########################################################################
-    #                                                                           #
-    # Send a request to turn off a picture template object in the VCS Canvas.   #
-    #                                                                           #
-    ##########################################################################
-    def _unselect_one(self, template_name, attr_name, X1, X2, Y1, Y2):
-
-        self.canvas._unselect_one(template_name, attr_name, X1, X2, Y1, Y2)
-
-    ##########################################################################
-    #                                                                           #
-    # Set the template editor event flag to select all template objects on the  #
-    # VCS Canvas.                                                               #
-    #                                                                           #
-    ##########################################################################
-    def _select_all(self):
-        # flush and block the X main loop
-
-        self.canvas._select_all()
-
-    ##########################################################################
-    #                                                                           #
-    # Set the template editor event flag to unselect all the template objects   #
-    # on the VCS Canvas.                                                        #
-    #                                                                           #
-    ##########################################################################
-    def _unselect_all(self):
-        # flush and block the X main loop
-
-        self.canvas._unselect_all()
-
-    ##########################################################################
-    #                                                                           #
-    # Set the template editor mode for the VCS Canvas screen.                   #
-    #                                                                           #
-    ##########################################################################
-    def _SCREEN_TEMPLATE_FLAG(self):
-        self.canvas.SCREEN_TEMPLATE_FLAG()
-
-    ##########################################################################
-    #                                                                           #
-    # Set the graphic method editor mode for the VCS Canvas screen.                   #
-    #                                                                           #
-    ##########################################################################
-    def _SCREEN_GM_FLAG(self):
-        self.canvas.SCREEN_GM_FLAG()
-
-    ##########################################################################
-    #                                                                           #
-    # Set the data mode for the VCS Canvas screen.                              #
-    #                                                                           #
-    ##########################################################################
-    def _SCREEN_DATA_FLAG(self):
-        self.canvas.SCREEN_DATA_FLAG()
-
-    ##########################################################################
-    #                                                                           #
-    # Set the screen check mode to DATA for the VCS Canvas.                     #
-    #                                                                           #
-    ##########################################################################
-    def _SCREEN_CHECKMODE_DATA_FLAG(self):
-        self.canvas.SCREEN_CHECKMODE_DATA_FLAG()
-
-    ##########################################################################
-    #                                                                           #
-    # Return the Screen mode, either data mode or template editor mode.         #
-    #                                                                           #
-    ##########################################################################
-    def SCREEN_MODE(self, *args):
-        return self.canvas.SCREEN_MODE(*args)
-
-    ##########################################################################
-    #                                                                           #
-    # Return the Screen mode, either data mode or template editor mode.         #
-    #                                                                           #
-    ##########################################################################
     def plot_annotation(self, *args):
         self.canvas.plot_annotation(*args)
 
-    ##########################################################################
-    #                                                                           #
-    # Flush X event que wrapper for VCS.                                        #
-    #                                                                           #
-    ##########################################################################
     def flush(self, *args):
         """
- Function: flush
+    The flush command executes all buffered X events in the queue.
 
- Description of Function:
-    The flush command executes all buffered X events in the que.
+    :Example:
+
+::
 
- Example of Use:
     a=vcs.init()
     a.plot(array,'default','isofill','quick')
     a.flush()
@@ -4084,24 +3988,22 @@ Options:::
 """
         return self.backend.flush(*args)
 
-    ##########################################################################
-    #                                                                           #
-    # Geometry wrapper for VCS.                                                 #
-    #                                                                           #
-    ##########################################################################
     def geometry(self, *args):
         """
- Function: geometry
-
- Description of Function:
     The geometry command is used to set the size and position of the VCS canvas.
 
- Example of Use:
+    :Example:
+
+::
+
     a=vcs.init()
     a.plot(array,'default','isofill','quick')
     a.geometry(450,337)
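+    # with no arguments, geometry() is assumed to return the current
+    # size and position of the canvas
+    g = a.geometry()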
 
 """
+        if len(args) == 0:
+            return self.backend.geometry()
+
         if (args[0] <= 0) or (args[1] <= 0):
             raise ValueError(
                 'Error -  The width and height values must be an integer greater than 0.')
@@ -4111,67 +4013,61 @@ Options:::
 
         return a
 
-    ##########################################################################
-    #                                                                           #
-    # VCS Canvas Information wrapper.                                           #
-    #                                                                           #
-    ##########################################################################
     def canvasinfo(self, *args, **kargs):
         """
- Function: canvasinfo
-
- Description of Function:
-    Obtain the current attributes of the VCS Canvas window.
-
- Example of Use:
-    a=vcs.init()
-    a.plot(array,'default','isofill','quick')
-    a.canvasinfo()
+        Obtain the current attributes of the VCS Canvas window.
 
-"""
+        :returns: Dictionary with keys: "mapstate" (whether the canvas is opened), "height", "width", "depth", "x", "y"
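+
+        :Example:
+
+::
+
+    a=vcs.init()
+    a.plot(array,'default','isofill','quick')
+    info = a.canvasinfo()
+    # look up the canvas dimensions in the returned dictionary
+    print info['width'], info['height']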
+        """
         return self.backend.canvasinfo(*args, **kargs)
 
-    ##########################################################################
-    #                                                                           #
-    # Get continents type wrapper for VCS.                                      #
-    #                                                                           #
-    ##########################################################################
     def getcontinentstype(self, *args):
         """
- Function: getcontinentstype
-
- Description of Function:
     Retrieve continents type from VCS; either an integer between 0 and 11 or the
     path to a custom continentstype.
 
- Example of Use:
+    :Example:
+
+::
+
      a=vcs.init()
-     cont_type = a.getcontinentstype() # Get the continents type
+     # Get the continents type
+     cont_type = a.getcontinentstype()
+
+:returns: An int between 0 and 11, or the path to a custom continentstype
+:rtype: int or str
 """
         try:
             return self._continents
         except:
             return None
 
-    ###########################################################################
-    #                                                                         #
-    # Postscript to GIF wrapper for VCS.                                      #
-    #                                                                         #
-    ###########################################################################
     def pstogif(self, filename, *opt):
         """
-  Function: pstogif
-
-  Description of Function:
      In some cases, the user may want to save the plot out as a gif image. This
      routine allows the user to convert a postscript file to a gif file.
 
-  Example of Use:
+    :Example:
+
+::
+
      a=vcs.init()
      a.plot(array)
-     a.pstogif('filename.ps')       # convert the postscript file to a gif file (l=landscape)
-     a.pstogif('filename.ps','l')   # convert the postscript file to a gif file (l=landscape)
-     a.pstogif('filename.ps','p')   # convert the postscript file to a gif file (p=portrait)
+     # convert the postscript file to a gif file (l=landscape)
+     a.pstogif('filename.ps')
+     # convert the postscript file to a gif file (l=landscape)
+     a.pstogif('filename.ps','l')
+     # convert the postscript file to a gif file (p=portrait)
+     a.pstogif('filename.ps','p')
+
+:param filename: String name of the desired output file
+:type filename: str
+
+:param opt: One of 'l' or 'p', indicating landscape or portrait mode, respectively.
+:type opt: str
+
+:returns: None
+:rtype: None
  """
         from os import popen
 
@@ -4201,22 +4097,17 @@ Options:::
         f.close()
         return
 
-    ##########################################################################
-    #                                                                           #
-    # Grid wrapper for VCS.                                                     #
-    #                                                                           #
-    ##########################################################################
     def grid(self, *args):
         """
- Function: grid
-
- Description of Function:
     Set the default plotting region for variables that have more dimension values
     than the graphics method. This will also be used for animating plots over the
     third and fourth dimensions.
 
 
- Example of Use:
+    :Example:
+
+::
+
     a=vcs.init()
     a.grid(12,12,0,71,0,45)
 """
@@ -4225,16 +4116,8 @@ Options:::
 
         return p
 
-    ##########################################################################
-    #                                                                           #
-    # Landscape VCS Canvas orientation wrapper for VCS.                         #
-    #                                                                           #
-    ##########################################################################
     def landscape(self, width=-99, height=-99, x=-99, y=-99, clear=0):
         """
- Function: landscape
-
- Description of Function:
     Change the VCS Canvas orientation to Landscape.
 
      Note: the (width, height) and (x, y) arguments work in pairs. That is, you must
@@ -4248,14 +4131,37 @@ Options:::
                 the screen with the point. Some X servers are not handling the threads properly
                 to keep up with the demands of the X client.
 
- Example of Use:
+    :Example:
+
+::
+
     a=vcs.init()
     a.plot(array)
-    a.landscape() # Change the VCS Canvas orientation and set object flag to landscape
-    a.landscape(clear=1) # Change the VCS Canvas to landscape and clear the page
-    a.landscape(width = 400, height = 337) # Change to landscape and set the window size
-    a.landscape(x=100, y = 200) # Change to landscape and set the x and y screen position
-    a.landscape(width = 400, height = 337, x=100, y = 200, clear=1) # Chagne to landscape and give specifications
+    # Change the VCS Canvas orientation and set object flag to landscape
+    a.landscape()
+    # Change the VCS Canvas to landscape and clear the page
+    a.landscape(clear=1)
+    # Change to landscape and set the window size
+    a.landscape(width = 400, height = 337)
+    # Change to landscape and set the x and y screen position
+    a.landscape(x=100, y = 200)
+    # Change to landscape and give specifications
+    a.landscape(width = 400, height = 337, x=100, y = 200, clear=1)
+
+:param width: Width of the canvas, in pixels
+:type width: int
+
+:param height: Height of the canvas, in pixels
+:type height: int
+
+:param x: X position of the canvas on the screen, in pixels (set together with y)
+:type x: int
+
+:param y: Y position of the canvas on the screen, in pixels (set together with x)
+:type y: int
+
+:param clear: Indicates the canvas should be cleared (1), or should not be cleared (0), when orientation is changed.
+:type clear: int
+
 """
         if (self.orientation() == 'landscape'):
             return
@@ -4287,156 +4193,119 @@ Options:::
 
         return l
 
-    ##########################################################################
-    #                                                                           #
-    # List Primary and Secondary elements wrapper for VCS.                      #
-    #                                                                           #
-    ##########################################################################
     def listelements(self, *args):
         """
- Function: listelements
+        Returns a Python list of all the VCS class objects.
 
- Description of Function:
-    Returns a Python list of all the VCS class objects.
+        The list that will be returned:
+        ['1d', '3d_dual_scalar', '3d_scalar', '3d_vector', 'boxfill', 'colormap', 'display', 'fillarea',
+         'font', 'fontNumber', 'isofill', 'isoline', 'line', 'list', 'marker', 'meshfill', 'projection',
+         'scatter', 'taylordiagram', 'template', 'textcombined', 'textorientation', 'texttable',
+         'vector', 'xvsy', 'xyvsy', 'yxvsx']
 
-   The list that will be returned:
-   ['template', 'boxfill', 'isofill', 'isoline',
-    'scatter', 'vector', 'xvsy', 'xyvsy', 'yxvsx', 'colormap', 'fillarea', 'format',
-    'line', 'list', 'marker', 'text']
+        :Example:
+
+::
+
+        a=vcs.init()
+        a.listelements()
+
+:returns: A list of string names of all VCS class objects
+:rtype: list
 
- Example of Use:
-    a=vcs.init()
-    a.listelements()
 """
         f = vcs.listelements
         L = sorted(f(*args))
 
         return L
 
-    ##########################################################################
-    #                                                                           #
-    # update VCS's Canvas orientation wrapper for VCS.                          #
-    #                                                                           #
-    ##########################################################################
     def updateorientation(self, *args):
         """
- Example of Use:
+     :Example:
+
+::
+
     a=vcs.init()
-    x.updateorientation()
+    a.updateorientation()
 """
 
         a = self.canvas.updateorientation(*args)
 
         return a
 
-    ##########################################################################
-    #                                                                           #
-    # Open VCS Canvas wrapper for VCS.                                          #
-    #                                                                           #
-    ##########################################################################
     def open(self, width=None, height=None, **kargs):
         """
- Function: open
-
- Description of Function:
     Open VCS Canvas object. This routine really just manages the VCS canvas. It will
     popup the VCS Canvas for viewing. It can be used to display the VCS Canvas.
 
- Example of Use:
+    :Example:
+
+::
+
     a=vcs.init()
     a.open()
     a.open(800,600)
+
+:param width: Desired width of the opened window, in pixels
+:type width: int
+
+:param height: Desired height of the opened window, in pixels
+:type height: int
+
 """
 
         a = self.backend.open(width, height, **kargs)
 
         return a
 
-    ##########################################################################
-    #                                                                           #
-    # Return VCS Canvas ID.                                                     #
-    #                                                                           #
-    ##########################################################################
     def canvasid(self, *args):
         '''
- Function: canvasid
+        Get the ID of this canvas.
 
- Description of Function:
-    Return VCS Canvas object ID. This ID number is found at the top of the VCS Canvas
-    as part of its title.
-
- Example of Use:
-    a=vcs.init()
-    a.open()
-    id = a.canvasid()
-'''
+        This ID number is found at the top of the VCS Canvas, as part of its title.
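+
+        :Example:
+
+::
+
+    a=vcs.init()
+    a.open()
+    # fetch the canvas ID (also shown in the canvas title)
+    id = a.canvasid()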
+        '''
         return self._canvas_id
 
-    ##########################################################################
-    #                                                                           #
-    # Connect the VCS Canvas to the GUI.                                        #
-    #                                                                           #
-    ##########################################################################
-    def _connect_gui_and_canvas(self, *args):
-        return self.canvas.connect_gui_and_canvas(*args)
-
-    ##########################################################################
-    #                                                                           #
-    # Page VCS Canvas orientation ('portrait' or 'landscape') wrapper for VCS.  #
-    #                                                                           #
-    ##########################################################################
-    def page(self, *args):
-        """
- Function: page
-
- Description of Function:
-    Change the VCS Canvas orientation to either 'portrait' or 'landscape'.
-
-    The orientation of the VCS Canvas and of cgm and raster images is controlled by
-    the PAGE command. Only portrait (y > x) or landscape (x > y) orientations are
-    permitted.
-
- Example of Use:
-    a=vcs.init()
-    a.plot(array)
-    a.page()      # Change the VCS Canvas orientation and set object flag to portrait
-"""
-
-        l = self.canvas.page(*args)
-
-        return l
-
-    ##########################################################################
-    #                                                                           #
-    # Portrait VCS Canvas orientation wrapper for VCS.                          #
-    #                                                                           #
-    ##########################################################################
     def portrait(self, width=-99, height=-99, x=-99, y=-99, clear=0):
         """
- Function: portrait
+        Change the VCS Canvas orientation to Portrait.
 
- Description of Function:
-    Change the VCS Canvas orientation to Portrait.
+        If the current orientation of the canvas is already portrait, nothing happens.
 
-     Note: the (width, height) and (x, y) arguments work in pairs. That is, you must
-           set (width, height) or (x, y) together to see any change in the VCS Canvas.
-
-           If the portrait method is called  with arguments before displaying a VCS Canvas,
-           then the arguments (width, height, x, y, and clear) will have no effect on the
-           canvas.
+        :Example:
 
-     Known Bug: If the visible plot on the VCS Canvas is not adjusted properly, then resize
-                the screen with the point. Some X servers are not handling the threads properly
-                to keep up with the demands of the X client.
+::
 
- Example of Use:
     a=vcs.init()
     a.plot(array)
-    a.portrait()      # Change the VCS Canvas orientation and set object flag to portrait
-    a.portrait(clear=1) # Change the VCS Canvas to portrait and clear the page
-    a.portrait(width = 337, height = 400) # Change to portrait and set the window size
-    a.portrait(x=100, y = 200) # Change to portrait and set the x and y screen position
-    a.portrait(width = 337, height = 400, x=100, y = 200, clear=1) # Chagne to portrait and give specifications
+    # Change the VCS Canvas orientation and set object flag to portrait
+    a.portrait()
+    # Change the VCS Canvas to portrait and clear the page
+    a.portrait(clear=1)
+    # Change to portrait and set the window size
+    a.portrait(width = 337, height = 400)
+    # Change to portrait and set the x and y screen position
+    a.portrait(x=100, y = 200)
+    # Change to portrait and give specifications
+    a.portrait(width = 337, height = 400, x=100, y = 200, clear=1)
+
+:param width: Width to set the canvas to (in pixels)
+:type width: int
+
+:param height: Height to set the canvas to (in pixels)
+:type height: int
+
+:param x: Unused.
+:type x: None
+
+:param y: Unused.
+:type y: None
+
+:param clear: Set to 1 to clear the canvas when the orientation is changed; 0 (the default) leaves the current plots in place.
+:type clear: int
+
+:returns: ???
+:rtype: ???
 """
         if (self.orientation() == 'portrait'):
             return
@@ -4468,37 +4337,52 @@ Options:::
 
         return p
 
-    ##########################################################################
-    #                                                                        #
-    # png wrapper for VCS.                                                   #
-    #                                                                        #
-    ##########################################################################
     def ffmpeg(self, movie, files, bitrate=1024, rate=None, options=None):
         """
- Function: ffmpeg
-
- Description of Function:
     MPEG output from a list of valid files.
-    Note that ffmpeg is smart enough to output to more than just mpeg format
+    Can output to more than just mpeg format.
+
+    Note: ffmpeg ALWAYS overwrites the output file
+
+    :Example:
+
+::
 
- Example of Use:
     a=vcs.init()
     #... code to generate png files ...
-    # here is dummy example
+    # here is a dummy example
     files =[]
     for i in range(10):
-      x.png('my_png__%i' % i)
+      a.png('my_png__%i' % i)
       files.append('my_png__%i.png' % i)
-    x.ffmpeg('mymovie.mpeg','my_png_%d.png') # generates mpeg from pattern
-    x.ffmpeg('mymovie.mpeg',files) # generates from list of files
-    x.ffmpeg('mymovie.mpeg','my_png_%d.png',bitrate=512) # generates mpeg at 512kbit
-                                 bitrate (bitrate is important to movie quality)
-    x.ffmpeg('mymovie.mpeg','my_png_%d.png',rate=50) # generates movie with 50 frame per second
-    x.ffmpeg('mymovie.mpeg','my_png_%d.png',options='-r 50 -b 1024k')
-    # genrats movie at 50 frame per sec and 1024k bitrate
-    NOTE : via the optins arg you can add audio file to your movie (see ffmpeg help)
-    returns the output string generated by ffmpeg program
-    ALWAYS overwrite output file
+    # generates mpeg from pattern
+    a.ffmpeg('mymovie.mpeg','my_png_%d.png')
+    # generates from list of files
+    a.ffmpeg('mymovie.mpeg',files)
+    # generates mpeg at 512kbit
+    a.ffmpeg('mymovie.mpeg','my_png_%d.png',bitrate=512)
+    # generates movie with 50 frames per second
+    a.ffmpeg('mymovie.mpeg','my_png_%d.png',rate=50)
+    # generates movie at 50 frames per second and 1024k bitrate
+    a.ffmpeg('mymovie.mpeg','my_png_%d.png',options='-r 50 -b 1024k')
+
+    NOTE: via the options arg you can add an audio file to your movie (see ffmpeg help)
+
+:param movie: ???
+:type movie: ???
+
+:param files: Input file name pattern (e.g. 'my_png_%d.png') or a list/tuple of file names
+:type files: str, list, or tuple
+
+:param rate: Desired output framerate
+:type rate: str
+
+:param options: ???
+:type options: ???
+
+:returns: The output string generated by ffmpeg program
+:rtype: str
+
 """
         args = ["ffmpeg", "-y"]
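[Editorial note] For readers unfamiliar with the wrapper internals: the method starts from this argument list and shells out to the ffmpeg binary. A minimal sketch of the idea follows; the helper name run_ffmpeg and the exact flag spelling are illustrative assumptions, not code from this patch::

    import subprocess

    def run_ffmpeg(movie, pattern, bitrate=1024, rate=None, options=None):
        # "-y" is what makes ffmpeg ALWAYS overwrite the output file,
        # as the docstring above warns.
        args = ["ffmpeg", "-y", "-i", pattern, "-b:v", "%dk" % bitrate]
        if rate is not None:
            args += ["-r", str(rate)]       # output framerate
        if options is not None:
            args += options.split()         # raw extra flags, e.g. audio input
        args.append(movie)
        return subprocess.check_output(args)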
 
@@ -4582,26 +4466,37 @@ Options:::
         return self.backend.getantialiasing()
 
     def setantialiasing(self, antialiasing):
-        """ Turn ON/OFF antialiasing"""
+        """ Set antialiasing rate.
+
+:param antialiasing: Integer from 0-64, representing the antialiasing rate (0 means no antialiasing).
+:type antialiasing: int
+        """
         self.backend.setantialiasing(antialiasing)
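[Editorial note] A quick usage sketch; whether the backend validates the 0-64 range is not shown in this patch, so treat out-of-range behavior as undefined::

    a = vcs.init()
    a.setantialiasing(0)   # disable antialiasing
    a.setantialiasing(16)  # 16x multisampled rendering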
 
-    ##########################################################################
-    #                                                                        #
-    # bg dims wrapper for VCS.                                               #
-    #                                                                        #
-    ##########################################################################
     def setbgoutputdimensions(self, width=None, height=None, units='inches'):
         """
- Function: setbgoutputdimensions
+        Sets dimensions for output in bg mode.
 
- Description of Function:
-    Sets dimensions for output in bg mode.
+         :Example:
 
- Example of Use:
-    a=vcs.init()
-    a.setbgoutputdimensions(width=11.5, height= 8.5)  # US Legal
-    a.setbgoutputdimensions(width=21, height=29.7, units='cm')  # A4
-"""
+::
+
+            a=vcs.init()
+            # US Legal
+            a.setbgoutputdimensions(width=11.5, height= 8.5)
+            # A4
+            a.setbgoutputdimensions(width=21, height=29.7, units='cm')
+
+:param width: Float representing the desired width of the output, using the specified unit of measurement
+:type width: float
+
+:param height: Float representing the desired height of the output, using the specified unit of measurement.
+:type height: float
+
+:param units: One of ['inches', 'in', 'cm', 'mm', 'pixel', 'pixels', 'dot', 'dots']. Defaults to 'inches'.
+:type units: str
+
+        """
         if units not in [
                 'inches', 'in', 'cm', 'mm', 'pixel', 'pixels', 'dot', 'dots']:
             raise Exception(
@@ -4626,23 +4521,35 @@ Options:::
             *args,
             **kargs)
 
-    ##########################################################################
-    #                                                                        #
-    # png wrapper for VCS.                                                   #
-    #                                                                        #
-    ##########################################################################
     def png(self, file, width=None, height=None,
             units=None, draw_white_background=True, **args):
         """
- Function: png
-
- Description of Function:
     PNG output, dimensions set via setbgoutputdimensions
 
- Example of Use:
+    :Example:
+
+::
+
     a=vcs.init()
     a.plot(array)
-    a.png('example')       # Overwrite a png file
+    # Overwrite a png file
+    a.png('example')
+
+:param file: A string containing the path to ???
+:type file: str
+
+:param width: Float representing the desired width of the output png, using the specified unit of measurement
+:type width: float
+
+:param height: Float representing the desired height of the output png, using the specified unit of measurement.
+:type height: float
+
+:param units: One of ['inches', 'in', 'cm', 'mm', None, 'pixel', 'pixels', 'dot', 'dots']. Defaults to 'inches'.
+:type units: str
+
+:param draw_white_background: Boolean value indicating whether or not the background should be white. Defaults to True.
+:type draw_white_background: bool
 """
         base = os.path.dirname(file)
         if base != "" and not os.path.exists(base):
@@ -4658,24 +4565,41 @@ Options:::
         return self.backend.png(
             file, W, H, units, draw_white_background, **args)
 
-    ##########################################################################
-    #                                                                           #
-    # pdf wrapper for VCS.                                               #
-    #                                                                           #
-    ##########################################################################
-    def pdf(self, file, width=None, height=None, units='inches'):
+    def pdf(self, file, width=None, height=None, units='inches',
+            textAsPaths=True):
         """
- Function: postscript
+    PDF output is another form of vector graphics.
 
- Description of Function:
-    SVG output is another form of vector graphics.
+    :Example:
+
+::
 
- Example of Use:
     a=vcs.init()
     a.plot(array)
-    a.pdf('example')       # Overwrite a postscript file
-    a.pdf('example', width=11.5, height= 8.5)  # US Legal
-    a.pdf('example', width=21, height=29.7, units='cm')  # A4
+    # Overwrite a pdf file
+    a.pdf('example')
+    # US Legal
+    a.pdf('example', width=11.5, height= 8.5)
+    # A4
+    a.pdf('example', width=21, height=29.7, units='cm')
+
+:param file: Desired string name of the output file
+:type file: str
+
+:param width: Integer specifying the desired width of the output, measured in the chosen units
+:type width: int
+
+:param height: Integer specifying the desired height of the output, measured in the chosen units
+:type height: int
+
+:param units: Must be one of ['inches', 'in', 'cm', 'mm', 'pixel', 'pixels', 'dot', 'dots']. Default is 'inches'.
+:type units: str
+
+:param textAsPaths: ???
+:type textAsPaths: bool
+
+:returns: ???
+:rtype: ???
 """
         if units not in [
                 'inches', 'in', 'cm', 'mm', 'pixel', 'pixels', 'dot', 'dots']:
@@ -4687,26 +4611,43 @@ Options:::
 
         if not file.split('.')[-1].lower() in ['pdf']:
             file += '.pdf'
-        return self.backend.pdf(file, W, H)
-    ##########################################################################
-    #                                                                           #
-    # SVG wrapper for VCS.                                               #
-    #                                                                           #
-    ##########################################################################
-
-    def svg(self, file, width=None, height=None, units='inches'):
-        """
- Function: postscript
+        return self.backend.pdf(file, W, H, textAsPaths)
 
- Description of Function:
+    def svg(self, file, width=None, height=None, units='inches',
+            textAsPaths=True):
+        """
     SVG output is another form of vector graphics.
 
- Example of Use:
+    :Example:
+
+::
+
     a=vcs.init()
     a.plot(array)
-    a.svg('example')       # Overwrite a postscript file
-    a.svg('example', width=11.5, height= 8.5)  # US Legal
-    a.svg('example', width=21, height=29.7, units='cm')  # A4
+    # Overwrite an svg file
+    a.svg('example')
+    # US Legal
+    a.svg('example', width=11.5, height= 8.5)
+    # A4
+    a.svg('example', width=21, height=29.7, units='cm')
+
+:param file: Desired string name of the output file
+:type file: str
+
+:param width: Float to set width of output SVG, in specified unit of measurement
+:type width: float
+
+:param height: Float to set height of output SVG, in specified unit of measurement
+:type height: float
+
+:param units: One of ['inches', 'in', 'cm', 'mm', 'pixel', 'pixels', 'dot', 'dots']. Defaults to 'inches'.
+:type units: str
+
+:param textAsPaths: ???
+:type textAsPaths: bool
+
+:returns: ???
+:rtype: ???
 """
         if units not in [
                 'inches', 'in', 'cm', 'mm', 'pixel', 'pixels', 'dot', 'dots']:
@@ -4718,7 +4659,7 @@ Options:::
 
         if not file.split('.')[-1].lower() in ['svg']:
             file += '.svg'
-        return self.backend.svg(file, W, H)
+        return self.backend.svg(file, W, H, textAsPaths)
 
     def _compute_margins(
             self, W, H, top_margin, bottom_margin, right_margin, left_margin, dpi):
@@ -4818,7 +4759,13 @@ Options:::
         return top_margin, bottom_margin, right_margin, left_margin
 
     def isopened(self):
-        """Is the Canvas opened?"""
+        """
+        Is the Canvas opened?
+
+:returns: A boolean value indicating whether the Canvas is opened (1), or closed (0)
+:rtype: bool
+        """
+
         return self.backend.isopened()
 
     def _compute_width_height(self, width, height, units, ps=False):
@@ -4894,11 +4841,8 @@ Options:::
         return W, H
 
     def postscript(self, file, mode='r', orientation=None, width=None, height=None,
-                   units='inches'):
+                   units='inches', textAsPaths=True):
         """
- Function: postscript
-
- Description of Function:
     Postscript output is another form of vector graphics. It is larger than its CGM output
     counterpart, because it is stored in ASCII format.
 
@@ -4908,17 +4852,50 @@ Options:::
     postscript file (i.e. mode (r)).
 
 
- Example of Use:
+    :Example:
+
+::
+
     a=vcs.init()
     a.plot(array)
-    a.postscript('example')       # Overwrite a postscript file
-    a.postscript('example', 'a')  # Append postscript to an existing file
-    a.postscript('example', 'r')  # Overwrite an existing file
-    a.postscript('example', mode='a')  # Append postscript to an existing file
-    a.postscript('example', width=11.5, height= 8.5)  # US Legal (default)
-    a.postscript('example', width=21, height=29.7, units='cm')  # A4
-    a.postscript('example', right_margin=.2,left_margin=.2,top_margin=.2,bottom_margin=.2)
+    # Overwrite a postscript file
+    a.postscript('example')
+    # Append postscript to an existing file
+    a.postscript('example', 'a')
+    # Overwrite an existing file
+    a.postscript('example', 'r')
+    # Append postscript to an existing file
+    a.postscript('example', mode='a')
+    # US Legal (default)
+    a.postscript('example', width=11.5, height= 8.5)
+    # A4
+    a.postscript('example', width=21, height=29.7, units='cm')
     # US Legal output and control of margins (for printer friendly output), default units 'inches'
+    a.postscript('example', right_margin=.2,left_margin=.2,top_margin=.2,bottom_margin=.2)
+
+:param file: String name of the desired output file
+:type file: str
+
+:param mode: The mode in which to open the file. One of 'r' or 'a'.
+:type mode: str
+
+:param orientation: Deprecated.
+:type orientation: None
+
+:param width: Desired width of the postscript output, in the specified unit of measurement
+:type width: int
+
+:param height: Desired height of the postscript output, in the specified unit of measurement
+:type height: int
+
+:param units: One of ['inches', 'in', 'cm', 'mm', 'pixel', 'pixels', 'dot', 'dots']. Defaults to 'inches'.
+:type units: str
+
+:param textAsPaths: ???
+:type textAsPaths: ???
+
+:returns: ???
+:rtype: ???
 """
         if units not in [
                 'inches', 'in', 'cm', 'mm', 'pixel', 'pixels', 'dot', 'dots']:
@@ -4933,7 +4910,7 @@ Options:::
         if not file.split('.')[-1].lower() in ['ps', 'eps']:
             file += '.ps'
         if mode == 'r':
-            return self.backend.postscript(file, W, H, units="pixels")
+            return self.backend.postscript(file, W, H, units="pixels", textAsPaths=textAsPaths)
         else:
             n = random.randint(0, 10000000000000)
             psnm = '/tmp/' + '__VCS__tmp__' + str(n) + '.ps'
@@ -4949,167 +4926,33 @@ Options:::
             else:
                 shutil.move(psnm, file)
 
-    ##########################################################################
-    #                                                                           #
-    # Showbg wrapper for VCS.                                                   #
-    #                                                                           #
-    ##########################################################################
-    def showbg(self, *args):
-        """
- Function: showbg
-
- Description of Function:
-    This function displays graphics segments, which are currently stored in the frame buffer,
-    on the VCS Canvas. That is, if the plot function was called with the option bg = 1 (i.e.,
-    background mode), then the plot is produced in the frame buffer and not visible to the
-    user. In order to view  the graphics segments, this function will copy the contents of
-    the frame buffer to the VCS Canvas, where the graphics can be viewed by the user.
-
- Example of Use:
-    a=vcs.init()
-    a.plot(array, bg=1)
-    x.showbg()
-"""
-        a = self.canvas.showbg(*args)
-
-        return a
-
-    ##########################################################################
-    #                                                                           #
-    # Backing Store wrapper for VCS.                                            #
-    #                                                                           #
-    ##########################################################################
-    def backing_store(self, *args):
-        """
- Function: backing_store
-
- Description of Function:
-    This function creates a backing store pixmap for the VCS Canvas.
-
- Example of Use:
-    a=vcs.init()
-    a.backing_store()
-"""
-        return self.canvas.backing_store(*args)
-
-    ##########################################################################
-    #                                                                           #
-    # Update the animation slab. Used only for the VCS Canvas GUI.              #
-    #                                                                           #
-    ##########################################################################
-    def update_animation_data(self, *args):
-        return self.canvas.update_animation_data(*args)
-
-    ##########################################################################
-    #                                                                           #
-    # Return the dimension information. Used only for the VCS Canvas GUI.       #
-    #                                                                           #
-    ##########################################################################
-    def return_dimension_info(self, *args):
-        return self.canvas.return_dimension_info(*args)
-
-    ##########################################################################
-    #                                                                           #
-    # Raster wrapper for VCS.                                                   #
-    #                                                                           #
-    ##########################################################################
-    def raster(self, file, mode='a'):
-        """
- Function: raster
-
- Description of Function:
-    In some cases, the user may want to save the plot out as an raster image. This
-    routine allows the user to save the VCS canvas output as a SUN raster file.
-    This file can be converted to other raster formats with the aid of xv and other
-    such imaging tools found freely on the web.
-
-    If no path/file name is given and no previously created raster file has been
-    designated, then file
-
-    /$HOME/%s/default.ras
-
-    will be used for storing raster images. However, if a previously created raster
-    file is designated, that file will be used for raster output.
-
- Example of Use:
-    a=vcs.init()
-    a.plot(array)
-    a.raster('example','a')   # append raster image to existing file
-    a.raster('example','r')   # overwrite existing raster file
-    a.raster(file='example',mode='r')   # overwrite existing raster file
-""" % (self._dotdir)
-        return self.canvas.raster(*(file, mode))
-
-    ##########################################################################
-    #                                                                           #
-    # Reset grid wrapper for VCS.                                               #
-    #                                                                           #
-    ##########################################################################
-    def resetgrid(self, *args):
-        """
- Function: resetgrid
-
- Description of Function:
-    Set the plotting region to default values.
-
- Example of Use:
-    Not Working!
-"""
-        return self.canvas.resetgrid(*args)
-
-    ##########################################################################
-    #                                                                           #
-    # Script wrapper for VCS.                                                   #
-    #                                                                           #
-    ##########################################################################
     def _scriptrun(self, *args):
         return vcs._scriptrun(*args)
 
     def scriptrun(self, aFile, *args, **kargs):
         vcs.scriptrun(aFile, *args, **kargs)
 
-    ##########################################################################
-    #                                                                           #
-    # Set default graphics method and template wrapper for VCS.                 #
-    #                                                                           #
-    ##########################################################################
-    def set(self, *args):
-        """
- Function: set
-
- Description of Function:
-    Set the default VCS primary class objects: template and graphics methods.
-    Keep in mind the template, determines the appearance of each graphics segment;
-    the graphic method specifies the display technique; and the data defines what
-    is to be displayed. Note, the data cannot be set with this function.
-
- Example of Use:
-    a=vcs.init()
-    a.set('isofill','quick') # Changes the default graphics method to Isofill: 'quick'
-    a.plot(array)
-"""
-        return self.canvas.set(*args)
-
-    ##########################################################################
-    #                                                                           #
-    # Set VCS color map wrapper for VCS.                                        #
-    #                                                                           #
-    ##########################################################################
     def setcolormap(self, name):
         """
- Function: setcolormap
-
- Description of Function:
     Sometimes it is necessary to change the colormap. This routine will change
     the VCS color map.
 
     If the visual display is 16-bit, 24-bit, or 32-bit TrueColor, then the VCS
     Canvas is redrawn every time the colormap is changed.
 
- Example of Use:
+    :Example:
+
+::
+
     a=vcs.init()
     a.plot(array,'default','isofill','quick')
     a.setcolormap("AMIP")
+
+:param name: ???
+:type name: ???
+
+:returns: ???
+:rtype: ???
 """
         # Don't update the VCS segment if there is no Canvas. This condition
         # happens in the initialize function for VCDAT only. This will cause a
@@ -5122,16 +4965,8 @@ Options:::
         self.update()
         return
 
-    ##########################################################################
-    #                                                                           #
-    # Set VCS color map cell wrapper for VCS.                                   #
-    #                                                                           #
-    ##########################################################################
     def setcolorcell(self, *args):
         """
- Function: setcolorcell
-
- Description of Function:
     Set an individual color cell in the active colormap. If default is
     the active colormap, then return an error string.
 
@@ -5142,7 +4977,10 @@ Options:::
     value must range from 0 to 100, where 0 represents no color intensity
     and 100 is the greatest color intensity.
 
- Example of Use:
+    :Example:
+
+::
+
     a=vcs.init()
     a.plot(array,'default','isofill','quick')
     a.setcolormap("AMIP")
@@ -5158,20 +4996,15 @@ Options:::
         a = vcs.setcolorcell(self.colormap, *args)
         return a
 
-    ##########################################################################
-    #                                                                        #
-    # Set continents line wrapper for VCS.                                   #
-    #                                                                        #
-    ##########################################################################
     def setcontinentsline(self, line="default"):
         """
-    Function: setcontinentsline
-
-    Description of Function:
         One has the option of configuring the appearance of the lines used to
         draw continents by providing a VCS Line object.
 
-    Example of Use:
+        :Example:
+
+::
+
         a = vcs.init()
         line = vcs.createline()
         line.width = 5
@@ -5179,6 +5012,12 @@ Options:::
         a.setcontinentsline(line)
         # Use default line
         a.setcontinentsline("default")
+
+:param line: ???
+:type line: str
+
+:returns: ???
+:rtype: ???
         """
         linename = VCS_validation_functions.checkLine(self, "continentsline", line)
         line = vcs.getline(linename)
@@ -5190,16 +5029,8 @@ Options:::
         else:
             return self._continents_line
 
-    ##########################################################################
-    #                                                                        #
-    # Set continents type wrapper for VCS.                           		 #
-    #                                                                        #
-    ##########################################################################
     def setcontinentstype(self, value):
         """
-   Function: setcontinentstype
-
-   Description of Function:
       One has the option of using continental maps that are predefined or that
       are user-defined. Predefined continental maps are either internal to VCS
       or are specified by external files. User-defined continental maps are
@@ -5217,10 +5048,19 @@ Options:::
 
       You can also pass a file by path.
 
-   Example of Use:
+      :Example:
+
+::
+
       a=vcs.init()
       a.setcontinentstype(3)
       a.plot(array,'default','isofill','quick')
+
+:param value: Integer representing continent type, as specified in function description
+:type value: int
+
+:returns: ???
+:rtype: ???
   """
         continent_path = VCS_validation_functions.checkContinents(self, value)
         self._continents = value
@@ -5242,11 +5082,6 @@ Options:::
         except:
             return VCS_validation_functions.checkContinents(self, 1)
 
-    ##########################################################################
-    #                                                                           #
-    # Screen GIF wrapper for VCS.                                               #
-    #                                                                           #
-    ##########################################################################
     def gif(self, filename='noname.gif', merge='r', orientation=None,
             geometry='1600x1200'):
         """
@@ -5274,7 +5109,7 @@ Options:::
     file; `Replace' (r) mode overwrites an existing gif file with new gif output.
     The default mode is to overwrite an existing gif file (i.e. mode (r)).
 
- Example of Use:
+ :Example:
+
+::
+
     a=vcs.init()
     a.plot(array)
     a.gif(filename='example.gif', merge='a', orientation='l', geometry='800x600')
@@ -5294,86 +5129,63 @@ Options:::
         nargs = ('gif', filename, merge, orientation, geometry)
         return self.backend.gif(nargs)
 
-    ##########################################################################
-    #                                                                           #
-    # Screen GhostScript (gs) wrapper for VCS.                                  #
-    #                                                                           #
-    ##########################################################################
     def gs(self, filename='noname.gs', device='png256',
            orientation=None, resolution='792x612'):
-        """
- Function: gs
 
- Description of Function:
-    This routine allows the user to save the VCS canvas in one of the many
-    GhostScript (gs) file types (also known as devices). To view other
-    GhostScript devices, issue the command "gs --help" at the terminal
-    prompt. Device names include: bmp256, epswrite, jpeg, jpeggray,
-    pdfwrite, png256, png16m, sgirgb, tiffpack, and tifflzw. By default
-    the device = 'png256'.
-
-    If no path/file name is given and no previously created gs file has been
-    designated, then file
-
-        /$HOME/%s/default.gs
-
-    will be used for storing gs images. However, if a previously created gs
-    file exist, then this output file will be used for storage.
-
-    By default, the page orientation is the canvas' orientation.
-    To translate the page orientation to portrait mode (p), set the parameter orientation = 'p'.
-    To translate the page orientation to landscape mode (l), set the parameter orientation = 'l'.
+        warnings.warn("Export to GhostScript is no longer supported", DeprecationWarning)
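[Editorial note] With the GhostScript path gone, callers presumably migrate to the surviving exporters. A sketch mirroring the old gs examples, under the same canvas setup (not code from this patch)::

    a = vcs.init()
    a.plot(array)
    # raster output, roughly replacing device='png256'
    a.png('example')
    # vector output, roughly replacing device='pdfwrite'
    a.pdf('example', width=21, height=29.7, units='cm')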
 
-    The gs command is used to create a single gs file at this point. The user
-    can use other tools to append separate image files.
-
- Example of Use:
-    a=vcs.init()
-    a.plot(array)
-    a.gs('example') #defaults: device='png256', orientation='l' and resolution='792x612'
-    a.gs(filename='example.tif', device='tiffpack', orientation='l', resolution='800x600')
-    a.gs(filename='example.pdf', device='pdfwrite', orientation='l', resolution='200x200')
-    a.gs(filename='example.jpg', device='jpeg', orientation='p', resolution='1000x1000')
-""" % (self._dotdir)
-        if orientation is None:
-            orientation = self.orientation()[0]
-        r = resolution.split('x')
-        f1 = f1 = float(r[0]) / 1100.0 * 100.0
-        f2 = f2 = float(r[1]) / 849.85 * 100.0
-        resolution = "%4.1fx%4.1f" % (f2, f1)
-        nargs = (filename, device, orientation, resolution)
-        return self.canvas.gs(*nargs)
-
-    ##########################################################################
-    #                                                                           #
-    # Screen Encapsulated PostScript wrapper for VCS.                           #
-    #                                                                           #
-    ##########################################################################
-    def eps(self, file, mode='r', orientation=None, width=None, height=None, units='inches',
-            left_margin=None, right_margin=None, top_margin=None, bottom_margin=None):
+    def eps(self, file, mode='r', orientation=None, width=None, height=None,
+            units='inches', textAsPaths=True):
         """
-        Function: Encapsulated PostScript
-
-        Description of Function:
         In some cases, the user may want to save the plot out as an Encapsulated
         PostScript image. This routine allows the user to save the VCS canvas output
         as an Encapsulated PostScript file.
         This file can be converted to other image formats with the aid of xv and other
         such imaging tools found freely on the web.
 
+        :Example:
+
+::
 
-        Example of Use:
         a=vcs.init()
         a.plot(array)
-        a.postscript('example')       # Overwrite a postscript file
-        a.postscript('example', 'a')  # Append postscript to an existing file
-        a.postscript('example', 'r')  # Overwrite an existing file
-        a.postscript('example', mode='a')  # Append postscript to an existing file
-        a.postscript('example', width=11.5, height= 8.5)  # US Legal (default)
-        a.postscript('example', width=21, height=29.7, units='cm')  # A4
+        # Overwrite an eps file
+        a.eps('example')
+        # Append eps output to an existing file
+        a.eps('example', 'a')
+        # Overwrite an existing file
+        a.eps('example', 'r')
+        # Append eps output to an existing file
+        a.eps('example', mode='a')
+        # US Legal (default)
+        a.eps('example', width=11.5, height= 8.5)
+        # A4
+        a.eps('example', width=21, height=29.7, units='cm')
         a.postscript('example', right_margin=.2,left_margin=.2,top_margin=.2,bottom_margin=.2)
         # US Legal output and control of margins (for printer friendly output), default units 'inches'
-        """
+
+
+:param file: String name of the desired output file
+:type file: str
+
+:param mode: The mode in which to open the file. One of 'r' or 'a'.
+:type mode: str
+
+:param orientation: Deprecated.
+:type orientation: None
+
+:param width: Width of the output image, in the unit of measurement specified
+:type width: float
+
+:param height: Height of the output image, in the unit of measurement specified
+:type height: float
+
+:param units: One of ['inches', 'in', 'cm', 'mm', 'pixel', 'pixels', 'dot', 'dots']. Defaults to 'inches'.
+:type units: str
+
+:returns: ???
+:rtype: ???
+"""
         ext = file.split(".")[-1]
         if ext.lower() != 'eps':
             file = file + '.eps'
@@ -5388,30 +5200,30 @@ Options:::
             width,
             height,
             units,
-            left_margin,
-            right_margin,
-            top_margin,
-            bottom_margin)
+            textAsPaths)
+
         os.popen("ps2epsi %s %s" % (tmpfile, file)).readlines()
         os.remove(tmpfile)
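[Editorial note] The conversion step shells out to GhostScript's ps2epsi tool, so that binary must be on PATH, and os.popen silently swallows failures. A more defensive spelling, offered only as a sketch and not part of this patch::

    import subprocess

    def _ps_to_eps(tmpfile, target):
        # Raises CalledProcessError instead of silently producing
        # no output when ps2epsi is missing or fails.
        subprocess.check_call(["ps2epsi", tmpfile, target])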
 
-    ##########################################################################
-    #                                                                           #
-    # Show VCS primary and secondary elements wrapper for VCS.                  #
-    #                                                                           #
-    ##########################################################################
     def show(self, *args):
         return vcs.show(*args)
     show.__doc__ = vcs.__doc__
 
-    ##########################################################################
-    #                                                                           #
-    # Look if a graphic method is in a file           .                         #
-    #                                                                           #
-    ##########################################################################
     def isinfile(self, GM, file=None):
-        """ Checks if a graphic method is stored in a file
-        if no file name is passed then looks into the initial.attributes file"""
+        """
+        Checks whether a graphics method is stored in a file.
+        If no file name is passed, it looks in the initial.attributes file.
+
+:param GM: The graphics method to search for
+:type GM: ???
+
+:param file: String name of the file to search
+:type file: str
+
+:returns: ???
+:rtype: ???
+
+        """
         nm = GM.name
         gm = GM.g_name
         key = gm + '_' + nm + '('
@@ -5425,12 +5237,7 @@ Options:::
             if ln.find(key) > -1:
                 f.close()
                 return 1
-        return 0
-    ##########################################################################
-    #                                                                           #
-    # Save VCS initial.attribute file  wrapper for VCS.                         #
-    #                                                                           #
-    ##########################################################################
+        return 0
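[Editorial note] A usage sketch for the reworked method; getboxfill and the 'default' name are standard vcs fixtures, shown here only for illustration::

    a = vcs.init()
    box = a.getboxfill('default')
    # 1 if this graphics method was saved to initial.attributes, 0 otherwise
    found = a.isinfile(box)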
 
     def saveinitialfile(self):
         """
@@ -5448,7 +5255,7 @@ Options:::
     The contents of the initial.attributes file can be customized by
     the user.
 
- Example of Use:
+ :Example:
+
+::
+
     a=vcs.init()
     ...
 
@@ -5465,123 +5272,85 @@ Options:::
         self.clean_auto_generated_objects()
         return vcs.saveinitialfile()
 
-    ##########################################################################
-    #                                                                           #
-    # Raise VCS Canvas to the top of all its siblings.                          #
-    #                                                                           #
-    ##########################################################################
-    def canvasraised(self, *args):
+    def raisecanvas(self, *args):
         """
- Function: canvasraised                         # Raise the VCS Canvas to the top
-
- Description of Function:
-    This function marks a VCS Canvas as eligible to be displayed and
-    positions the window at the top of the stack of its siblings.
-
- Example of Use:
-    a=vcs.init()
-    ...
-
-    a.canvasraised()
-"""
-
-        return self.backend.canvasraised(*args)
-
-    ##########################################################################
-    #                                                                           #
-    # Returns 1 if a VCS Canvas is displayed on the screen. Returns a 0 if no   #
-    # VCS Canvas is displayed on the screen.                                    #
-    #                                                                           #
-    ##########################################################################
-    def iscanvasdisplayed(self, *args):
+        Raise the VCS Canvas to the top of all open windows.
         """
- Function: iscanvasdisplayed          # Return 1 if a VCS Canvas is displayed
-
- Description of Function:
-    This function returns a 1 if a VCS Canvas is displayed or a 0 if
-    no VCS Canvas is displayed on the screen.
-
- Example of Use:
-    a=vcs.init()
-    ...
-
-    a.iscanvasdisplayed()
-"""
+        return self.backend.raisecanvas(*args)
 
-        return self.canvas.iscanvasdisplayed(*args)
-
-    ##########################################################################
-    #                                                                           #
-    # Is VCS's orientation landscape?                                           #
-    #                                                                           #
-    ##########################################################################
     def islandscape(self):
         """
- Function: islandscape
-
- Description of Function:
     Indicates if VCS's orientation is landscape.
 
     Returns a 1 if orientation is landscape.
     Otherwise, it will return a 0, indicating false (not in landscape mode).
 
- Example of Use:
-    a=vcs.init()
-    ...
+    :Example:
 
+::
+
+    a=vcs.init()
+    # ...
     if a.islandscape():
-       a.portrait()               # Set VCS's orientation to portrait mode
+        # Set VCS's orientation to portrait mode
+        a.portrait()
+
+:returns: Boolean indicating VCS is in landscape mode (1), or not (0)
+:rtype: bool
 """
         if (self.orientation() == 'landscape'):
             return 1
         else:
             return 0
 
-    ##########################################################################
-    #                                                                           #
-    # Is VCS's orientation portrait?                                            #
-    #                                                                           #
-    ##########################################################################
     def isportrait(self):
         """
- Function: isportrait
-
- Description of Function:
     Indicates if VCS's orientation is portrait.
 
-    Returns a 1 if orientation is portrait.
-    Otherwise, it will return a 0, indicating false (not in portrait mode).
 
- Example of Use:
-    a=vcs.init()
-    ...
+    :Example:
+
+::
 
+    a=vcs.init()
+    #...
     if a.isportrait():
-       a.landscape()               # Set VCS's orientation to landscape mode
+        # Set VCS's orientation to landscape mode
+        a.landscape()
+
+:returns: 1 if orientation is portrait, 0 otherwise
+:rtype: bool
+
 """
         if (self.orientation() == 'portrait'):
             return 1
         else:
             return 0
-    ##########################################################################
-    #                                                                           #
-    # Dislplay plot functions for VCS.                                          #
-    #                                                                           #
-    ##########################################################################
 
     def getplot(self, Dp_name_src='default', template=None):
         """
- Function: getplot                  # Get existing display plot
-
- Description of Function:
     This function creates a display plot object from an existing display
     plot in a VCS plot. If no display plot name
     is given, then None is returned.
 
- Example of Use:
+     :Example:
+
+::
+
     a=vcs.init()
-    a.show('template')                  # Show all the existing templates
-    plot1=a.getplot('dpy_plot_1')       # plot1 instance of 'dpy_plot_1' display plot
+    # Show all the existing templates
+    a.show('template')
+    # plot1 instance of 'dpy_plot_1' display plot
+    plot1=a.getplot('dpy_plot_1')
+
+:param Dp_name_src: String name of an existing display plot object
+:type Dp_name_src: str
+
+:param template: ???
+:type template: ???
+
+:returns: ???
+:rtype: ???
 """
         if not isinstance(Dp_name_src, str):
             raise ValueError('Error -  The argument must be a string.')
@@ -5592,11 +5361,6 @@ Options:::
             display._template_origin = template
         return display
 
-    ##########################################################################
-    #                                                                           #
-    # Colormap functions for VCS.                                               #
-    #                                                                           #
-    ##########################################################################
     def createcolormap(self, Cp_name=None, Cp_name_src='default'):
         return vcs.createcolormap(Cp_name, Cp_name_src)
     createcolormap.__doc__ = vcs.manageElements.createcolormap.__doc__
@@ -5605,15 +5369,16 @@ Options:::
         return vcs.getcolormap(Cp_name_src)
     getcolormap.__doc__ = vcs.manageElements.getcolormap.__doc__
 
-    ##########################################################################
-    #                                                                           #
-    # Font functions.                       #
-    #                                                                           #
-    ##########################################################################
     def addfont(self, path, name=""):
         """
-        Add a font to VCS, path then a name you'd like to associate it with
-        """
+        Add a font to VCS.
+
+    :param path: Path to the font file you wish to add (must be .ttf)
+    :type path: str
+
+    :param name: Name to use to represent the font.
+    :type name: str
+"""
         if not os.path.exists(path):
             raise ValueError('Error -  The font path does not exist')
         if os.path.isdir(path):
@@ -5647,20 +5412,19 @@ Options:::
             return nms[0]
 
     def getfontnumber(self, name):
-        """
-        get the font number associated with a font name
-        """
         return vcs.getfontnumber(name)
+    getfontnumber.__doc__ = vcs.utils.getfontnumber.__doc__
 
     def getfontname(self, number):
-        """
-        get the font name associated with a font number
-        """
         return vcs.getfontname(number)
+    getfontname.__doc__ = vcs.utils.getfontname.__doc__
 
     def getfont(self, font):
         """
-        get the font name/number associated with a font number/name
+        Get the font name/number associated with a font number/name
+
+        :param font: The font name/number
+        :type font: int or str
         """
         if isinstance(font, int):
             return self.getfontname(font)
@@ -5670,7 +5434,14 @@ Options:::
             raise vcsError("Error you must pass a string or int")
 
     def switchfonts(self, font1, font2):
-        """ Switch 2 font indexes, you can pass either the font names or indexes """
+        """
+        Switch the font numbers of two fonts.
+
+        :param font1: The first font
+        :type font1: int or str
+        :param font2: The second font
+        :type font2: int or str
+        """
         if isinstance(font1, str):
             index1 = self.getfont(font1)
         elif isinstance(font1, (int, float)):
@@ -5693,7 +5464,15 @@ Options:::
         return self.canvas.switchfontnumbers(*(index1, index2))
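[Editorial note] A usage sketch for the two-font swap; the font name 'Times' is illustrative, and any two names or indexes known to vcs.elements['font'] should work::

    a = vcs.init()
    # After this, everything that used font 1 ('default') renders
    # with 'Times', and vice versa.
    a.switchfonts('default', 'Times')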
 
     def copyfontto(self, font1, font2):
-        """ copy name and path of font 1 into font 2, you can pass either the font names or indexes """
+        """
+        Copy `font1` into `font2`.
+
+:param font1: Name/number of font to copy
+:type font1: str or int
+
+:param font2: Name/number of destination
+:type font2: str or int
+"""
         if isinstance(font1, str):
             index1 = self.getfont(font1)
         elif isinstance(font1, (int, float)):
@@ -5715,81 +5494,49 @@ Options:::
         return self.canvas.copyfontto(*(index1, index2))
 
     def setdefaultfont(self, font):
-        """Sets the passed font as the default font for vcs"""
+        """
+        Sets the passed font as the default font for vcs.
+
+:param font: Font name or index to use as default
+:type font: str or int
+        """
         if isinstance(font, str):
             font = self.getfont(font)
         return self.copyfontto(font, 1)
 
-    ##########################################################################
-    #                                                                           #
-    # Orientation VCS Canvas orientation wrapper for VCS.                       #
-    #                                                                           #
-    ##########################################################################
     def orientation(self, *args, **kargs):
         """
- Function: orientation
+        Return canvas orientation.
 
- Description of Function:
-    Return VCS's orientation. Will return either Portrait or Landscape.
+        The current implementation does not use any args or kargs.
 
- Example of Use:
-    a=vcs.init()
-    a.orientation()      # Return either "landscape" or "portrait"
-"""
+        :Example:
+
+::
+
+        a = vcs.init()
+        # Show current orientation of the canvas
+        a.orientation()
+
+:returns: A string indicating the orientation of the canvas, i.e. 'landscape' or 'portrait'
+:rtype: str
+        """
         return self.backend.orientation(*args, **kargs)
 
-    ##########################################################################
-    #                                                                           #
-    # Get VCS color map cell wrapper for VCS.                                   #
-    #                                                                           #
-    ##########################################################################
     def getcolorcell(self, *args):
         """
- Function: getcolorcell
-
- Description of Function:
-    Get an individual color cell in the active colormap. If default is
-    the active colormap, then return an error string.
-
-    If the the visul display is 16-bit, 24-bit, or 32-bit TrueColor, then a redrawing
-    of the VCS Canvas is made evertime the color cell is changed.
 
-    Note, the user can only change color cells 0 through 239 and R,G,B
-    value must range from 0 to 100. Where 0 represents no color intensity
-    and 100 is the greatest color intensity.
-
- Example of Use:
-    a=vcs.init()
-    a.plot(array,'default','isofill','quick')
-    a.setcolormap("AMIP")
-    a.getcolorcell(11,0,0,0)
-    a.getcolorcell(21,100,0,0)
-    a.getcolorcell(31,0,100,0)
-    a.getcolorcell(41,0,0,100)
-    a.getcolorcell(51,100,100,100)
-    a.getcolorcell(61,70,70,70)
-
-"""
+        """
+        """%s""" % vcs.getcolorcell.__doc__
         return vcs.getcolorcell(args[0], self)
 
-    ##########################################################################
-    #                                                                           #
-    # Get VCS color map name wrapper for VCS.                                   #
-    #                                                                           #
-    ##########################################################################
-    def getcolormapname(self, *args):
+    def getcolormapname(self):
         """
- Function: getcolormapcell
-
- Description of Function:
-    Get colormap name of the active colormap.
+        Returns the name of the colormap this canvas is set to use by default.
 
-
- Example of Use:
-    a=vcs.init()
-    a.plot(array,'default','isofill','quick')
-    a.getcolormapname()
-"""
+        To set that colormap, use :ref:`vcs.Canvas.Canvas.setcolormap`.
+        """
         if self.colormap is None:
             return vcs._colorMap
         return self.colormap
@@ -5799,12 +5546,6 @@ Options:::
         print 'Keywords:', kargs
         return None
 
-#############################################################################
-#                                                                           #
-# Primarily used for reseting the animation date and time string.           #
-#                                                                           #
-#############################################################################
-
 
 def change_date_time(tv, number):
     timeaxis = tv.getTime()
diff --git a/Packages/vcs/vcs/VCS_validation_functions.py b/Packages/vcs/vcs/VCS_validation_functions.py
index 7595d900e36282404ce77d083d7af5ec5ef07984..271fb179381226586de9dd82b67112c2ff39b6e8 100644
--- a/Packages/vcs/vcs/VCS_validation_functions.py
+++ b/Packages/vcs/vcs/VCS_validation_functions.py
@@ -243,6 +243,18 @@ def checkListOfNumbers(self, name, value, minvalue=None,
     return list(value)
 
 
+def checkInStringList(self, name, value, options):
+    checkName(self, name, value)
+    if value not in options:
+        checkedRaise(
+            self,
+            value,
+            ValueError,
+            'Invalid value ' + value + '. Valid options are: ' +
+            ','.join(options))
+    return value
+
+
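[Editorial note] For context, validators in this module are typically invoked from property setters on VCS objects. A hypothetical use of the new helper; the property name and option list are illustrative, not from this patch::

    def _setscale(self, value):
        # checkInStringList returns the validated value or raises ValueError
        self._scale = checkInStringList(self, 'scale', value,
                                        ['linear', 'log10', 'ln', 'exp'])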
 def checkFont(self, name, value):
     if (value is None):
         pass
@@ -1995,7 +2007,7 @@ def add_level_ext_2(self, ext_value):
         if isinstance(self.levels[0], list):  # remove from tuple of lists
             if self.levels[-1][1] > 9.e19:
                 self.levels.pop(-1)
-        if isinstance(self.levels, tuple):       # remove from list
+        if isinstance(self.levels, (tuple, list)):       # remove from list
             ret_tup = []
             for i in range(len(self.levels) - 1):
                 ret_tup.insert(i + 1, self.levels[i])
@@ -2009,15 +2021,16 @@ def add_level_ext_2(self, ext_value):
         return self.levels
 
     # We may need to add extension
-    if isinstance(self.levels, tuple):
-        self.levels = list(self.levels)
-    if isinstance(self.levels[-1], list):  # add to tuple of lists
-        if self.levels[-1][1] < 9.e19:
-            self.levels.append([self.levels[-1][1], 1e20])
-    else:
-        if self.levels[-1] < 9.e19:
-            self.levels.append(1.e20)
-    return self.levels
+    if isinstance(self.levels, (list, tuple)):
+        if isinstance(self.levels, tuple):
+            self.levels = list(self.levels)
+        if isinstance(self.levels[-1], list):  # add to tuple of lists
+            if self.levels[-1][1] < 9.e19:
+                self.levels.append([self.levels[-1][1], 1e20])
+        else:
+            if self.levels[-1] < 9.e19:
+                self.levels.append(1.e20)
+        return self.levels
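[Editorial note] Concretely, assuming ext_2 setters route through add_level_ext_2 as elsewhere in this module, the rewritten logic behaves like this on a plain list (values illustrative)::

    gm.levels = [0, 10, 20]
    gm.ext_2 = True    # appends the extension level -> [0, 10, 20, 1e+20]
    gm.ext_2 = False   # removes it again            -> [0, 10, 20]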
 
 
 def _getext_1(self):
diff --git a/Packages/vcs/vcs/VTKAnimate.py b/Packages/vcs/vcs/VTKAnimate.py
index fe83c6b2bb6811ce0532fc59a9748b5ac86dcaef..76079826f26ac8cb25aef6fd72e4047609bb63d2 100644
--- a/Packages/vcs/vcs/VTKAnimate.py
+++ b/Packages/vcs/vcs/VTKAnimate.py
@@ -280,8 +280,8 @@ class VTKAnimate(animate_helper.AnimationController):
         be.showGUI()
         be.renWin.Render()
 
-    def draw_frame(self, frame_num=None, render_offscreen=True,
-                   allow_static=True, main_window_png=False):
+    def draw_frame(self, frame_num=None, render_offscreen=False,
+                   allow_static=False, main_window_png=True):
         """
         Draws a frame on the canvas
           frame_num: Which frame to draw- defaults to self.frame_num
@@ -295,8 +295,7 @@ class VTKAnimate(animate_helper.AnimationController):
         else:
             self.frame_num = frame_num
 
-        if render_offscreen or (
-                allow_static and len(self.animation_files) == self.number_of_frames()):
+        if len(self.animation_files) == self.number_of_frames():
             # Attempt to extract the renderers and place them onto the create
             # thread
             self.extract_renderers()
@@ -318,7 +317,7 @@ class VTKAnimate(animate_helper.AnimationController):
 
             self.vcs_self.backend.renWin.Render()
 
-            if main_window_png:
+            if main_window_png or self.playback_params.zoom_factor != 1:
                 png_name = self.create_thread.get_frame_name(self.frame_num)
                 self.vcs_self.png(png_name)
                 self.animation_files = sorted(
@@ -346,4 +345,5 @@ class VTKAnimate(animate_helper.AnimationController):
         self.draw_frame(
             frame_num=frame,
             allow_static=False,
-            render_offscreen=False)
+            render_offscreen=False,
+            main_window_png=False)
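To make the new draw_frame defaults concrete, here is a hedged usage sketch. It assumes a working CDAT/UV-CDAT install; the file name and variable name are hypothetical:

::

    import vcs
    import cdms2  # assumes a CDAT/UV-CDAT environment

    f = cdms2.open("sample.nc")       # hypothetical dataset
    canvas = vcs.init()
    canvas.plot(f("ta"))              # hypothetical variable name
    canvas.animate.create()
    # New defaults: render in the main window and save a PNG of the frame.
    canvas.animate.draw_frame(frame_num=0)
    # Opt out of the PNG snapshot explicitly, as frame() now does above.
    canvas.animate.draw_frame(frame_num=0, main_window_png=False)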
diff --git a/Packages/vcs/vcs/VTKPlots.py b/Packages/vcs/vcs/VTKPlots.py
index 64de3e67760f65ada3fa0bad8216f11ad010c368..ec388c726d1f33a8f43c2f6833f4196c36474d41 100644
--- a/Packages/vcs/vcs/VTKPlots.py
+++ b/Packages/vcs/vcs/VTKPlots.py
@@ -46,18 +46,16 @@ class VTKVCSBackend(object):
         self._renderers = {}
         self._plot_keywords = [
             'cdmsfile',
-            'cell_coordinates'
-            # used to render the continents
-            'continents_renderer',
+            'cell_coordinates',
             # dataset bounds in lon/lat coordinates
             'dataset_bounds',
             # This may be smaller than the data viewport. It is used
             # if autot is passed
             'ratio_autot_viewport',
-            # used to render the dataset
-            'dataset_renderer',
-            # dataset scale: (xScale, yScale)
-            'dataset_scale',
+            # used to render the dataset for clicked point info (hardware selection)
+            'surface_renderer',
+            # (xScale, yScale) - datasets can be scaled using the window ratio
+            'surface_scale',
             # the same as vcs.utils.getworldcoordinates for now. getworldcoordinates uses
             # gm.datawc_... or, if that is not set, it uses data axis margins (without bounds).
             'plotting_dataset_bounds',
@@ -73,7 +71,7 @@ class VTKVCSBackend(object):
         # Initially set to 16x Multi-Sampled Anti-Aliasing
         self.antialiasing = 8
         self._rasterPropsInVectorFormats = False
-        self._initialGeometry = geometry
+        self._geometry = geometry
 
         if renWin is not None:
             self.renWin = renWin
@@ -138,99 +136,92 @@ class VTKVCSBackend(object):
             d = vcs.elements["display"][dnm]
             if d.array[0] is None:
                 continue
-            t = vcs.elements["template"][d.template]
-            gm = vcs.elements[d.g_type][d.g_name]
-            # for non-linear projection or for meshfill. Meshfill is wrapped at
-            # VTK level, so vcs calculations do not work.
-            if gm.projection != "linear" or gm.g_name == 'Gfm':
-                selector = vtk.vtkHardwareSelector()
-                datasetRenderer = d.backend['dataset_renderer']
-                continentsRenderer = d.backend.get('continents_renderer')
-                dataset = d.backend['vtk_backend_grid']
-                if (datasetRenderer and dataset):
-                    selector.SetRenderer(datasetRenderer)
-                    selector.SetArea(xy[0], xy[1], xy[0], xy[1])
-                    selector.SetFieldAssociation(vtk.vtkDataObject.FIELD_ASSOCIATION_CELLS)
-                    # We want to be able see information behind continents
-                    if (continentsRenderer):
-                        continentsRenderer.SetDraw(False)
-                    selection = selector.Select()
-                    if (continentsRenderer):
-                        continentsRenderer.SetDraw(True)
-                    if (selection.GetNumberOfNodes() > 0):
-                        selectionNode = selection.GetNode(0)
-                        prop = selectionNode.GetProperties().Get(selectionNode.PROP())
-                        if (prop):
-                            cellIds = prop.GetMapper().GetInput().GetCellData().GetGlobalIds()
-                            if (cellIds):
-                                # scalar value
-                                a = selectionNode.GetSelectionData().GetArray(0)
-                                geometryId = a.GetValue(0)
-                                cellId = cellIds.GetValue(geometryId)
-                                scalars = dataset.GetCellData().GetScalars()
-                                value = scalars.GetValue(cellId)
-                                geoTransform = d.backend['vtk_backend_geo']
-                                if (geoTransform):
-                                    geoTransform.Inverse()
-                                # Use the world picker to get world coordinates
-                                # we deform the dataset, so we need to fix the
-                                # world picker using xScale, yScale
-                                xScale, yScale = d.backend['dataset_scale']
-                                worldPicker = vtk.vtkWorldPointPicker()
-                                worldPicker.Pick(xy[0], xy[1], 0, datasetRenderer)
-                                worldPosition = list(worldPicker.GetPickPosition())
-                                if (xScale > yScale):
-                                    worldPosition[0] /= (xScale/yScale)
-                                else:
-                                    worldPosition[1] /= (yScale/xScale)
-                                lonLat = worldPosition
-                                if (geoTransform):
-                                    geoTransform.InternalTransformPoint(worldPosition, lonLat)
-                                    geoTransform.Inverse()
-                                st += "Var: %s\n" % d.array[0].id
-                                if (float("inf") not in lonLat):
-                                    st += "X=%4.1f\nY=%4.1f\n" % (lonLat[0], lonLat[1])
-                                st += "Value: %g" % value
-            else:
-                if t.data.x1 <= x <= t.data.x2 and t.data.y1 <= y <= t.data.y2:
-                    x1, x2, y1, y2 = vcs.utils.getworldcoordinates(gm,
-                                                                   d.array[0].getAxis(-1),
-                                                                   d.array[0].getAxis(-2))
-
-                    X = (x - t.data.x1) / (t.data.x2 - t.data.x1) * (x2 - x1) + x1
-                    Y = (y - t.data.y1) / (t.data.y2 - t.data.y1) * (y2 - y1) + y1
-
-                    # Ok we now have the X/Y values we need to figure out the
-                    # indices
-                    try:
-                        I = d.array[0].getAxis(-1).mapInterval((X, X, 'cob'))[0]
-                        try:
-                            J = d.array[
-                                0].getAxis(-2).mapInterval((Y, Y, 'cob'))[0]
-                            # Values at that point
-                            V = d.array[0][..., J, I]
-                        except:
-                            V = d.array[0][..., I]
-                        if isinstance(V, numpy.ndarray):
-                            # Grab the appropriate time slice
-                            if self.canvas.animate.created():
-                                t = self.canvas.animate.frame_num
-                                try:
-                                    taxis = V.getTime()
-                                    V = V(time=taxis[t % len(taxis)]).flat[0]
-                                except:
-                                    V = V.flat[0]
+            # Use the hardware selector to determine the cell id we clicked on
+            selector = vtk.vtkHardwareSelector()
+            surfaceRenderer = d.backend['surface_renderer']
+            dataset = d.backend['vtk_backend_grid']
+            if (surfaceRenderer and dataset):
+                selector.SetRenderer(surfaceRenderer)
+                selector.SetArea(xy[0], xy[1], xy[0], xy[1])
+                selector.SetFieldAssociation(vtk.vtkDataObject.FIELD_ASSOCIATION_CELLS)
+                # We only want to render the surface for selection
+                renderers = self.renWin.GetRenderers()
+                renderers.InitTraversal()
+                while(True):
+                    renderer = renderers.GetNextItem()
+                    if (renderer is None):
+                        break
+                    renderer.SetDraw(False)
+                surfaceRenderer.SetDraw(True)
+                selection = selector.Select()
+                renderers.InitTraversal()
+                while(True):
+                    renderer = renderers.GetNextItem()
+                    if (renderer is None):
+                        break
+                    renderer.SetDraw(True)
+                surfaceRenderer.SetDraw(False)
+                if (selection.GetNumberOfNodes() > 0):
+                    selectionNode = selection.GetNode(0)
+                    prop = selectionNode.GetProperties().Get(selectionNode.PROP())
+                    if (prop):
+                        cellIds = prop.GetMapper().GetInput().GetCellData().GetGlobalIds()
+                        if (cellIds):
+                            st += "Var: %s\n" % d.array[0].id
+                            # cell attribute
+                            a = selectionNode.GetSelectionData().GetArray(0)
+                            geometryId = a.GetValue(0)
+                            cellId = cellIds.GetValue(geometryId)
+                            attributes = dataset.GetCellData().GetScalars()
+                            if (attributes is None):
+                                attributes = dataset.GetCellData().GetVectors()
+                            elementId = cellId
+
+                            geoTransform = d.backend['vtk_backend_geo']
+                            if (geoTransform):
+                                geoTransform.Inverse()
+                            # Use the world picker to get world coordinates
+                            # we deform the dataset, so we need to fix the
+                            # world picker using xScale, yScale
+                            xScale, yScale = d.backend['surface_scale']
+                            worldPicker = vtk.vtkWorldPointPicker()
+                            worldPicker.Pick(xy[0], xy[1], 0, surfaceRenderer)
+                            worldPosition = list(worldPicker.GetPickPosition())
+                            if (xScale > yScale):
+                                worldPosition[0] /= (xScale/yScale)
                             else:
-                                V = V.flat[0]
-                        try:
-                            st += "Var: %s\nX[%i] = %4.1f\nY[%i] = %4.1f\nValue: %g" % (
-                                d.array[0].id, I, X, J, Y, V)
-                        except:
-                            st += "Var: %s\nX = %4.1f\nY[%i] = %4.1f\nValue: %g" % (
-                                d.array[0].id, X, I, Y, V)
-                    except:
-                        st += "Var: %s\nX=%g\nY=%g\nValue = N/A" % (
-                            d.array[0].id, X, Y)
+                                worldPosition[1] /= (yScale/xScale)
+                            lonLat = worldPosition
+                            if (attributes is None):
+                                # if point dataset, return the value for the closest point
+                                cell = dataset.GetCell(cellId)
+                                closestPoint = [0, 0, 0]
+                                subId = vtk.mutable(0)
+                                pcoords = [0, 0, 0]
+                                dist2 = vtk.mutable(0)
+                                weights = [0] * cell.GetNumberOfPoints()
+                                cell.EvaluatePosition(worldPosition, closestPoint,
+                                                      subId, pcoords, dist2, weights)
+                                indexMax = numpy.argmax(weights)
+                                pointId = cell.GetPointId(indexMax)
+                                attributes = dataset.GetPointData().GetScalars()
+                                if (attributes is None):
+                                    attributes = dataset.GetPointData().GetVectors()
+                                elementId = pointId
+                            if (geoTransform):
+                                geoTransform.InternalTransformPoint(worldPosition, lonLat)
+                                geoTransform.Inverse()
+                            if (float("inf") not in lonLat):
+                                st += "X=%4.1f\nY=%4.1f\n" % (lonLat[0], lonLat[1])
+                            # get the cell value or the closest point value
+                            if (attributes):
+                                if (attributes.GetNumberOfComponents() > 1):
+                                    v = attributes.GetTuple(elementId)
+                                    st += "Value: (%g, %g)" % (v[0], v[1])
+                                else:
+                                    value = attributes.GetValue(elementId)
+                                    st += "Value: %g" % value
+
         if st == "":
             return
         ren = vtk.vtkRenderer()
@@ -299,23 +290,21 @@ class VTKVCSBackend(object):
             parg.append(d.g_type)
             parg.append(d.g_name)
             plots_args.append(parg)
-            kwarg = {}
+            key = {"display_name": dnm}
             if d.ratio is not None:
-                kwarg["ratio"] = d.ratio
-
-            kwarg["continents"] = d.continents
-            kwarg["continents_line"] = d.continents_line
+                key["ratio"] = d.ratio
+            key["continents"] = d.continents
+            key["continents_line"] = d.continents_line
+            key_args.append(key)
 
-            key_args.append(kwarg)
-
-        # Have to pull out the UI layer so it doesn't get borked by the clear
+        # Have to pull out the UI layer so it doesn't get borked by the z-order changes
         self.hideGUI()
 
         if self.canvas.configurator is not None:
             restart_anim = self.canvas.configurator.animation_timer is not None
         else:
             restart_anim = False
-        self.canvas.clear(render=False)
+        self.canvas.clear(render=False, preserve_display=True)
 
         for i, pargs in enumerate(plots_args):
             self.canvas.plot(*pargs, render=False, **key_args[i])
@@ -385,9 +374,9 @@ class VTKVCSBackend(object):
             # turning off antialiasing by default
             # mostly so that pngs are same accross platforms
             self.renWin.SetMultiSamples(self.antialiasing)
-            if self._initialGeometry is not None:
-                width = self._initialGeometry["width"]
-                height = self._initialGeometry["height"]
+            if self._geometry is not None:
+                width = self._geometry["width"]
+                height = self._geometry["height"]
             else:
                 width = None
                 height = None
@@ -446,9 +435,9 @@ class VTKVCSBackend(object):
             if (self.bg):
                 height = self.canvas.bgY
                 width = self.canvas.bgX
-            elif (self._initialGeometry):
-                height = self._initialGeometry['height']
-                width = self._initialGeometry['width']
+            elif (self._geometry):
+                height = self._geometry['height']
+                width = self._geometry['width']
             else:
                 height = self.canvas.bgY
                 width = self.canvas.bgX
@@ -555,8 +544,18 @@ class VTKVCSBackend(object):
         else:
             return True
 
-    def geometry(self, x, y, *args):
-        self.renWin.SetSize(x, y)
+    def geometry(self, *args):
+        if len(args) == 0:
+            return self._geometry
+        if len(args) < 2:
+            raise TypeError("geometry() takes either zero arguments (to "
+                            "query the size) or two or more, starting with "
+                            "<width, height>. Got %d" % len(args))
+        x = args[0]
+        y = args[1]
+
+        if self.renWin is not None:
+            self.renWin.SetSize(x, y)
+        self._geometry = {'width': x, 'height': y}
         self._lastSize = (x, y)
 
     def flush(self):
@@ -598,6 +597,7 @@ class VTKVCSBackend(object):
 
         vtk_backend_grid = kargs.get("vtk_backend_grid", None)
         vtk_backend_geo = kargs.get("vtk_backend_geo", None)
+        bounds = vtk_backend_grid.GetBounds() if vtk_backend_grid else None
 
         pipeline = vcsvtk.createPipeline(gm, self)
         if pipeline is not None:
@@ -627,7 +627,7 @@ class VTKVCSBackend(object):
                     ren,
                     to=to,
                     tt=tt,
-                    cmap=self.canvas.colormap)
+                    cmap=self.canvas.colormap, geoBounds=bounds, geo=vtk_backend_geo)
                 self.setLayer(ren, tt.priority)
                 self.text_renderers[tt_key] = ren
         elif gtype == "line":
@@ -636,7 +636,6 @@ class VTKVCSBackend(object):
                                           cmap=self.canvas.colormap)
                 returned["vtk_backend_line_actors"] = actors
                 create_renderer = True
-                bounds = vtk_backend_grid.GetBounds() if vtk_backend_grid else None
                 for act, geo in actors:
                     ren = self.fitToViewport(
                         act,
@@ -689,21 +688,16 @@ class VTKVCSBackend(object):
         self.scaleLogo()
 
         # Decide whether to rasterize background in vector outputs
-        # Current criteria to rasterize:
+        # Cases that currently cannot be vectorized:
         #       * if fillarea style is either pattern or hatch
-        #       * if fillarea opacity is less than 100 for solid fill
         try:
             if gm.style and all(style != 'solid' for style in gm.style):
                 self._rasterPropsInVectorFormats = True
-            elif gm.opacity and not all(o == 100 for o in gm.opacity):
-                self._rasterPropsInVectorFormats = True
         except:
             pass
         try:
             if gm.fillareastyle in ['pattern', 'hatch']:
                 self._rasterPropsInVectorFormats = True
-            elif not all(o == 100 for o in gm.fillareaopacity):
-                self._rasterPropsInVectorFormats = True
         except:
             pass
 
@@ -757,7 +751,10 @@ class VTKVCSBackend(object):
                 plot.onClosing(cell)
 
     def plotContinents(self, wc, projection, wrap, vp, priority, **kargs):
-        contData = vcs2vtk.prepContinents(self.canvas._continentspath())
+        continents_path = self.canvas._continentspath()
+        if continents_path is None:
+            return (None, 1, 1)
+        contData = vcs2vtk.prepContinents(continents_path)
         contMapper = vtk.vtkPolyDataMapper()
         contMapper.SetInputData(contData)
         contActor = vtk.vtkActor()
@@ -858,9 +855,9 @@ class VTKVCSBackend(object):
                     ren = self.createRenderer()
                     self.renWin.AddRenderer(ren)
                     self.setLayer(ren, 1)
-                    self._renderers[(None, None, None)] = ren
+                    self._renderers[(None, None, None)] = (ren, 1, 1)
                 else:
-                    ren = self._renderers[(None, None, None)]
+                    ren, xratio, yratio = self._renderers[(None, None, None)]
                 tt, to = crdate.name.split(":::")
                 tt = vcs.elements["texttable"][tt]
                 to = vcs.elements["textorientation"][to]
@@ -895,9 +892,9 @@ class VTKVCSBackend(object):
                     ren = self.createRenderer()
                     self.renWin.AddRenderer(ren)
                     self.setLayer(ren, 1)
-                    self._renderers[(None, None, None)] = ren
+                    self._renderers[(None, None, None)] = (ren, 1, 1)
                 else:
-                    ren = self._renderers[(None, None, None)]
+                    ren, xratio, yratio = self._renderers[(None, None, None)]
                 tt, to = zname.name.split(":::")
                 tt = vcs.elements["texttable"][tt]
                 to = vcs.elements["textorientation"][to]
@@ -1061,8 +1058,28 @@ class VTKVCSBackend(object):
                 break
         return plot
 
-    def vectorGraphics(
-            self, output_type, file, width=None, height=None, units=None):
+    def vectorGraphics(self, output_type, file, width=None, height=None,
+                       units=None, textAsPaths=True):
+        """Export vector graphics to PDF, Postscript, SVG and EPS format.
+
+       Reasoning for textAsPaths as default:
+       The output formats supported by gl2ps which VTK uses for postscript/pdf/svg/etc
+       vector exports) handle text objects inconsistently. For example, postscript mangles
+       newlines, pdf doesn't fully support rotation and alignment, stuff like that.
+       These are limitations in the actual format specifications themselves.
+
+       On top of that, embedding text objects then relies on the viewer to locate
+       a similar font and render the text, and odds are good that the fonts used
+       by the viewer will have different characteristics than the ones used in the
+       original rendering. So, for instance, you have some right-justified lines of
+       text, like the data at the top of the VCS plots. If the font used by the viewer
+       uses different widths for any of glyphs composing the text, the text will be
+       unaligned along the right-hand side, since the text is always anchored on
+       it's left side due to how these formats represent text objects. This just looks bad.
+       Exporting text as paths eliminates all of these problems with portability across
+       viewers and inconsistent text object handling between output formats.
+       """
+
         if self.renWin is None:
             raise Exception("Nothing on Canvas to dump to file")
 
@@ -1087,13 +1104,19 @@ class VTKVCSBackend(object):
 
         # Since the patterns are applied as textures on vtkPolyData, enabling
         # background rasterization is required to write them out
+
         if self._rasterPropsInVectorFormats:
             gl.Write3DPropsAsRasterImageOn()
 
         gl.SetInput(self.renWin)
         gl.SetCompress(0)  # Do not compress
         gl.SetFilePrefix(".".join(file.split(".")[:-1]))
-        gl.TextAsPathOn()
+
+        if textAsPaths:
+            gl.TextAsPathOn()
+        else:
+            gl.TextAsPathOff()
+
         if output_type == "svg":
             gl.SetFileFormatToSVG()
         elif output_type == "ps":
@@ -1110,14 +1133,17 @@ class VTKVCSBackend(object):
         self.showGUI()
 
     def postscript(self, file, width=None, height=None,
-                   units=None):
-        return self.vectorGraphics("ps", file, width, height, units)
+                   units=None, textAsPaths=True):
+        return self.vectorGraphics("ps", file, width, height,
+                                   units, textAsPaths)
 
-    def pdf(self, file, width=None, height=None, units=None):
-        return self.vectorGraphics("pdf", file, width, height, units)
+    def pdf(self, file, width=None, height=None, units=None, textAsPaths=True):
+        return self.vectorGraphics("pdf", file, width, height,
+                                   units, textAsPaths)
 
-    def svg(self, file, width=None, height=None, units=None):
-        return self.vectorGraphics("svg", file, width, height, units)
+    def svg(self, file, width=None, height=None, units=None, textAsPaths=True):
+        return self.vectorGraphics("svg", file, width,
+                                   height, units, textAsPaths)
 
     def gif(self, filename='noname.gif', merge='r', orientation=None,
             geometry='1600x1200'):
@@ -1214,7 +1240,39 @@ class VTKVCSBackend(object):
         return VTKAnimate.VTKAnimate(*args, **kargs)
 
     def gettextextent(self, textorientation, texttable):
-        warnings.warn("Please implement gettextextent for VTK Backend")
+        # Ensure renwin exists
+        self.createRenWin()
+
+        if isinstance(textorientation, (str, unicode)):
+            textorientation = vcs.gettextorientation(textorientation)
+        if isinstance(texttable, (str, unicode)):
+            texttable = vcs.gettexttable(texttable)
+
+        from vtk_ui.text import text_dimensions
+
+        text_property = vtk.vtkTextProperty()
+        info = self.canvasinfo()
+        win_size = info["width"], info["height"]
+        vcs2vtk.prepTextProperty(text_property, win_size, to=textorientation, tt=texttable)
+
+        dpi = self.renWin.GetDPI()
+
+        length = max(len(texttable.string), len(texttable.x), len(texttable.y))
+
+        strings = texttable.string + [texttable.string[-1]] * (length - len(texttable.string))
+        xs = texttable.x + [texttable.x[-1]] * (length - len(texttable.x))
+        ys = texttable.y + [texttable.y[-1]] * (length - len(texttable.y))
+
+        labels = zip(strings, xs, ys)
+
+        extents = []
+
+        for s, x, y in labels:
+            width, height = text_dimensions(s, text_property, dpi)
+            extents.append([x, x + float(width) / win_size[0], y, y + float(height) / win_size[1]])
+
+        return extents
+
 
     def getantialiasing(self):
         if self.renWin is None:
@@ -1497,7 +1555,10 @@ class VTKVCSBackend(object):
                                 float(cdutil.averager(array1, axis=" ".join(["(%s)" %
                                                                              S for S in array1.getAxisIds()])))
                         except:
-                            meanstring = 'Mean %.4g' % array1.mean()
+                            try:
+                                meanstring = 'Mean %.4g' % array1.mean()
+                            except:
+                                meanstring = 'Mean %.4g' % numpy.mean(array1.filled())
                     t.SetInput(meanstring)
                 elif att == "crdate" and tstr is not None:
                     t.SetInput(tstr.split()[0].replace("-", "/"))
@@ -1521,3 +1582,9 @@ class VTKVCSBackend(object):
         img = reader.GetOutput()
         size = img.GetDimensions()
         return size[0], size[1]
+
+    def raisecanvas(self):
+        if self.renWin is None:
+            warnings.warn("Cannot raise if you did not open the canvas yet.")
+            return
+        self.renWin.MakeCurrent()
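A short sketch tying the backend changes above together: geometry() is now both a getter and a setter, and the vector-export methods accept textAsPaths. This is a hedged example; canvas.backend below is the VTKVCSBackend instance, and the plot content is assumed:

::

    import vcs

    canvas = vcs.init(geometry={"width": 800, "height": 600})
    # ... plot something on the canvas ...
    # With no arguments, geometry() returns the stored size; with two,
    # it records the new size and resizes the window if one exists.
    print(canvas.backend.geometry())     # {'width': 800, 'height': 600}
    canvas.backend.geometry(1200, 900)

    # Vector exports render text as paths by default; pass
    # textAsPaths=False to keep selectable text objects, subject to the
    # portability caveats in the vectorGraphics docstring above.
    canvas.backend.pdf("plot.pdf")
    canvas.backend.svg("plot_text.svg", textAsPaths=False)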
diff --git a/Packages/vcs/vcs/__init__.py b/Packages/vcs/vcs/__init__.py
index 26e87d84a22174d6f0505961540ee60f9371b642..e450c23fb8e0add7bbbc91868336085cd56c7306 100755
--- a/Packages/vcs/vcs/__init__.py
+++ b/Packages/vcs/vcs/__init__.py
@@ -1,27 +1,48 @@
 """
-# VCS Visualization and Control System - (VCS) module
-#
-#################################################################################
-#                                                                               #
-# Module:       vcs module                                                      #
-#                                                                               #
-# Authors:      PCMDI Software Team                                             #
-#               support@pcmdi.llnl.gov                                          #
-#               http://cdat.sf.net/cdat                                         #
-#                                                                               #
-# Description:  Python command wrapper for VCS's functionality. VCS is computer #
-#               software for the selection, manipulation, and display of        #
-#               scientific data. By specification of the desired data, the      #
-#               graphics method, and the display template, the VCS user gains   #
-#               virtually complete control of the appearance of the data        #
-#               display and associated text and animation.                      #
-#                                                                               #
-# Upgrade to VTK:                                                               #
-# Author: Charles Doutriaux                                                     #
-# Description: Took out all C code and used VTK's python bindings instead       #
-#                                                                               #
-#################################################################################
+=====================================
+VCS: Visualization and Control System
+=====================================
+
+-------
+Authors
+-------
+
+Creator: Dean Williams (LLNL, AIMS Team)
+
+Lead Developer: Charles Doutriaux (LLNL, AIMS Team)
+
+Contributors: https://github.com/UV-CDAT/uvcdat/graphs/contributors
+
+Support Email: uvcdat-support@llnl.gov
+
+Project Site: http://uvcdat.llnl.gov/
+
+Project Repo: https://github.com/UV-CDAT/uvcdat
+
+-----------
+Description
+-----------
+VCS is a visualization library for scientific data. It has a simple
+model for defining a plot, which is decomposed into three parts:
+
+1. **Data**: If it's iterable, we'll plot it... or at least try!
+   Currently we support numpy arrays, lists (nested and not),
+   and CDMS2 variables (there's some special support for metadata
+   from CDMS2 that gives some niceties in your plot, but it's not
+   mandatory).
+2. **Graphics Method**: We have a variety of plot types that we
+   support out-of-the box; you can easily customize every aspect
+   of them to create the effect that you're looking for. If you can't,
+   we also support defining your own graphics methods, which you can
+   share with other users using standard python infrastructure (conda, pip).
+3. **Template**: Templates control the appearance of everything that
+   *isn't* your data. They position labels, control fonts, adjust borders,
+   place legends, and more. They're very flexible, and give the fine-grained
+   control of your plot that is needed for the truly perfect plot. Once you've
+   customized them, you can also save them out for later use, and distribute
+   them to other users.
 """
+
 _doValidation = True
 next_canvas_id = 1
 import cdat_info  # noqa
@@ -223,37 +244,45 @@ if os.path.exists(user_init):
     vcs.scriptrun(user_init)
 
 canvaslist = []
-#
-#
-# Construct a VCS Canvas Object.                                                #
-#
-#
 
 
 def init(mode=1, pause_time=0, call_from_gui=0, size=None,
          backend="vtk", geometry=None, bg=None):
     '''
- Function: init   # Initialize, Construct a VCS Canvas Object
-
- Description of Function:
-    Construct the VCS Canas object.
-
- Example of Use:
-    import vcs,cdms2
-
-    file=cdms2.open('filename.nc')
-    slab=file.getslab('variable')
-    a=vcs.init()                        # This examples constructs 4 VCS Canvas
-    a.plot(slab)                        # Plot slab using default settings
-    b=vcs.init()                        # Construct VCS object
-    template=b.gettemplate('AMIP')      # Get 'example' template object
-    b.plot(slab,template)               # Plot slab using template 'AMIP'
-    c=vcs.init()                        # Construct new VCS object
-    isofill=c.getisofill('quick')       # Get 'quick' isofill graphics method
-    c.plot(slab,template,isofill)       # Plot slab using template and isofill objects
-    d=vcs.init()                        # Construct new VCS object
-    isoline=c.getisoline('quick')       # Get 'quick' isoline graphics method
-    c.plot(isoline,slab,template)       # Plot slab using isoline and template objects
+    Initialize and construct a VCS Canvas object.
+
+    :Example:
+
+::
+
+    import vcs
+
+    # Portrait orientation: width/height ratio of 0.5
+    portrait = vcs.init(size=.5)
+    # also accepts "usletter"
+    letter = vcs.init(size="letter")
+    a4 = vcs.init(size="a4")
+
+    import vtk
+    # Useful for embedding VCS inside another application
+    my_win = vtk.vtkRenderWindow()
+    embedded = vcs.init(backend=my_win)
+
+    dict_init = vcs.init(geometry={"width": 1200, "height": 600})
+    tuple_init = vcs.init(geometry=(1200, 600))
+
+    bg_canvas = vcs.init(bg=True)
+
+:param size: Aspect ratio for canvas (width / height)
+:param backend: Which VCS backend to use
+:param geometry: Size (in pixels) you want the canvas to be.
+:param bg: Initialize a canvas to render in "background" mode (without displaying a window)
+:type size: float or case-insensitive str
+:type backend: str, `vtk.vtkRenderWindow`
+:type geometry: dict or tuple
+:type bg: bool
+:return: an initialized canvas
+:rtype: `vcs.Canvas.Canvas`
 '''
     canvas = Canvas.Canvas(
         mode=mode,
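The three-part model described in the new module docstring looks like this in practice. A hedged sketch; the element names are created here and otherwise arbitrary:

::

    import numpy
    import vcs

    data = numpy.sin(numpy.linspace(0, 6.28, 100)).reshape(10, 10)  # 1. data
    boxfill = vcs.createboxfill("example_gm")       # 2. graphics method
    template = vcs.createtemplate("example_tpl")    # 3. template
    canvas = vcs.init()
    canvas.plot(data, template, boxfill)
    canvas.png("example.png")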
diff --git a/Packages/vcs/vcs/boxfill.py b/Packages/vcs/vcs/boxfill.py
index 231b228399c3dd71ca6ab2f34e9e5fb97470f12d..b403895fca8b42969695f855c6596da348d9eed3 100755
--- a/Packages/vcs/vcs/boxfill.py
+++ b/Packages/vcs/vcs/boxfill.py
@@ -24,6 +24,7 @@ import vcs
 import cdtime
 import VCS_validation_functions
 import xmldocs
+import numpy
 import warnings
 
 
@@ -734,6 +735,76 @@ class Gfb(object):
         self.yaxisconvert = yat
     xyscale.__doc__ = xmldocs.xyscaledoc
 
+    def getlevels(self, varmin, varmax):
+        if self.boxfill_type == "custom":
+            return self.levels
+
+        nlev = float(self.color_2 - self.color_1 + 1)
+        autolevels = False
+
+        if numpy.allclose(self.level_1, 1.e20) or numpy.allclose(self.level_2, 1.e20):
+            autolevels = True
+            low_end = varmin
+            high_end = varmax
+        else:
+            low_end = self.level_1
+            high_end = self.level_2
+
+        if self.boxfill_type == "log10":
+            low_end = numpy.ma.log10(low_end)
+            high_end = numpy.ma.log10(high_end)
+
+        if autolevels:
+            # Use nice values for the scale
+            scale = vcs.mkscale(low_end, high_end)
+            low_end = scale[0]
+            high_end = scale[-1]
+
+        dx = (high_end - low_end) / nlev
+
+        if dx == 0:
+            high_end += .00001
+            return [low_end, high_end]
+        float_epsilon = numpy.finfo(numpy.float32).eps
+        contourLevels = numpy.arange(low_end, high_end + float_epsilon, dx)
+
+        return contourLevels
+
+    def getlegendlabels(self, levels):
+        if self.legend:
+            return self.legend
+
+        if numpy.allclose(self.level_1, 1.e20) or numpy.allclose(self.level_2, 1.e20):
+            autolevels = True
+        else:
+            autolevels = False
+
+        if len(levels) > 12:
+            scale = vcs.mkscale(levels[0], levels[-1])
+            if autolevels:
+                return vcs.mklabels(scale)
+            else:
+                # Create our own scale
+                dx = (self.level_2 - self.level_1) / float(len(scale) - 1)
+                real_values = [self.level_1, self.level_2]
+                float_epsilon = numpy.finfo(numpy.float32).eps
+                levels = numpy.arange(levels[0], levels[-1] + float_epsilon, dx)
+        else:
+            real_values = levels
+
+        # Need to line up the levels and the labels, so we'll massage the label positions
+        max_round = 0
+        for l in real_values:
+            round_pos = 0
+            while numpy.round(l, round_pos) != l:
+                round_pos += 1
+            max_round = max(max_round, round_pos)
+
+        round_values = [numpy.round(l, max_round) for l in levels]
+        round_labels = vcs.mklabels(round_values, "list")
+
+        return {lev: label for lev, label in zip(levels, round_labels)}
+
     ###########################################################################
     #                                                                         #
     # List out boxfill graphics method members (attributes).                  #
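The new helpers can be exercised directly; a hedged sketch follows (the graphics-method name is arbitrary, and the 0-100 range stands in for real varmin/varmax values):

::

    import vcs

    gfb = vcs.createboxfill("levels_demo")
    # level_1/level_2 default to 1e20, so getlevels autoscales to a
    # "nice" set of contour levels spanning the supplied data range.
    levels = gfb.getlevels(0.0, 100.0)
    # getlegendlabels lines labels up with those levels, rounding them
    # consistently so levels and labels stay aligned.
    labels = gfb.getlegendlabels(levels)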
diff --git a/Packages/vcs/vcs/displayplot.py b/Packages/vcs/vcs/displayplot.py
index 1f00450ba1cb689c079cada688bd2eb0e4aab497..80638f22606e7e926d50d777cfbeeda6fdf46daa 100755
--- a/Packages/vcs/vcs/displayplot.py
+++ b/Packages/vcs/vcs/displayplot.py
@@ -209,9 +209,10 @@ class Dp(object):
         return self._g_type
 
     def _setg_type(self, value):
+        import vcsaddons
         value = VCS_validation_functions.checkString(self, 'g_type', value)
         value = value.lower()
-        if value not in vcs.elements and value != "text":
+        if value not in vcs.elements and value != "text" and value not in vcsaddons.gms:
             raise ValueError(
                 "invalid g_type '%s' must be one of: %s " %
                 (value, vcs.elements.keys()))
@@ -259,6 +260,7 @@ class Dp(object):
             self._g_name = "default"
             self._array = []
             self._continents = 1
+            self._continents_line = "default"
             self.ratio = None
         else:
             src = vcs.elements["display"][Dp_name_src]
@@ -269,6 +271,7 @@ class Dp(object):
             self.g_type = src.g_type
             self.g_name = src.g_name
             self.continents = src.continents
+            self.continents_line = src.continents_line
             self.priority = src.priority
             self.ratio = src.ratio
 
diff --git a/Packages/vcs/vcs/dv3d.py b/Packages/vcs/vcs/dv3d.py
index 19a35a8087bc5da9799c9ca5a4f2ea5171121db8..4a30aed8e62f20547b8938b6a4b57cad340d9624 100644
--- a/Packages/vcs/vcs/dv3d.py
+++ b/Packages/vcs/vcs/dv3d.py
@@ -126,19 +126,37 @@ class Gfdv3d(object):
         self.projection = 'default'
         self.provenanceHandler = None
 
+        vcs.elements[self.g_name][Gfdv3d_name] = self
+
+        self._axes = "xyz"
+
+        # Use parent config values if possible
+        if isinstance(Gfdv3d_name_src, (unicode, str)):
+            # Make sure we aren't inheriting from ourself
+            if Gfdv3d_name_src != Gfdv3d_name:
+                parent_cfg = vcs.elements[self.g_name][Gfdv3d_name_src].cfgManager
+                self._axes = vcs.elements[self.g_name][Gfdv3d_name_src]._axes
+            else:
+                parent_cfg = None
+        else:
+            # Make sure we aren't inheriting from ourself
+            if Gfdv3d_name_src.name != self.name:
+                parent_cfg = Gfdv3d_name_src.cfgManager
+                self._axes = Gfdv3d_name_src._axes
+            else:
+                parent_cfg = None
+
+        self.cfgManager = ConfigManager(cm=parent_cfg)
+
         if Gfdv3d_name == "Hovmoller3D":
             self._axes = "xyt"
-        else:
-            self._axes = "xyz"
 
-        self.cfgManager = ConfigManager()
         self.ncores = multiprocessing.cpu_count()
+
         self.addParameters()
 
-        vcs.elements[self.g_name][Gfdv3d_name] = self
         self.plot_attributes['name'] = self.g_name
         self.plot_attributes['template'] = Gfdv3d_name
-#        print "Adding VCS element: %s %s " % ( self.g_name, Gfdv3d_name )
 
     def setProvenanceHandler(self, provenanceHandler):
         self.provenanceHandler = provenanceHandler
@@ -215,14 +233,14 @@ class Gf3Dvector(Gfdv3d):
 
     def __init__(self, Gfdv3d_name, Gfdv3d_name_src='default'):
         self.g_name = '3d_vector'
-        Gfdv3d.__init__(self, Gfdv3d_name, Gfdv3d_name_src='default')
+        Gfdv3d.__init__(self, Gfdv3d_name, Gfdv3d_name_src=Gfdv3d_name_src)
 
 
 class Gf3Dscalar(Gfdv3d):
 
     def __init__(self, Gfdv3d_name, Gfdv3d_name_src='default'):
         self.g_name = '3d_scalar'
-        Gfdv3d.__init__(self, Gfdv3d_name, Gfdv3d_name_src='default')
+        Gfdv3d.__init__(self, Gfdv3d_name, Gfdv3d_name_src=Gfdv3d_name_src)
         self.VectorDisplay = Gfdv3d_name
 
 
@@ -230,7 +248,7 @@ class Gf3DDualScalar(Gfdv3d):
 
     def __init__(self, Gfdv3d_name, Gfdv3d_name_src='default'):
         self.g_name = '3d_dual_scalar'
-        Gfdv3d.__init__(self, Gfdv3d_name, Gfdv3d_name_src='default')
+        Gfdv3d.__init__(self, Gfdv3d_name, Gfdv3d_name_src=Gfdv3d_name_src)
 
 if __name__ == '__main__':
     dv3d = vcs.get3d_scalar()
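The inheritance fix above can be checked with something like the following hedged sketch. It assumes create3d_scalar mirrors the other create* constructors, and it pokes the internal _axes attribute shown in the code above purely for illustration:

::

    import vcs

    parent = vcs.create3d_scalar("parent_gm")
    parent._axes = "xyt"          # internal attribute, per the code above
    child = vcs.create3d_scalar("child_gm", "parent_gm")
    # With the fix, the child copies the parent's configuration and axes
    # instead of always re-reading 'default'.
    assert child._axes == "xyt"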
diff --git a/Packages/vcs/vcs/isoline.py b/Packages/vcs/vcs/isoline.py
index c1912c57aa164b000d597993097c20ddef4c14dc..5961dfbad7b74f81305ff426f75346bf0517f6a0 100755
--- a/Packages/vcs/vcs/isoline.py
+++ b/Packages/vcs/vcs/isoline.py
@@ -310,6 +310,8 @@ class Gi(object):
         iso.linewidths=([1,2,3,4,5,6,7,8])	# Will set the isoline to a specific
                                                 #     width size
         iso.linewidths=None			# Turns off the line width size
+    If the number of line styles, colors, or widths is less than the number of
+    levels, the attribute list is extended using its last value.
 
     There are three ways to specify the text or font number:
         iso.text=(1,2,3,4,5,6,7,8,9)     	# Font numbers are between 1 and 9
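The extension rule documented above can be seen in a short hedged sketch (attribute names follow the Gi docstring; the level values are arbitrary):

::

    import vcs

    iso = vcs.createisoline("extend_demo")
    iso.level = [[0, 0], [20, 0], [40, 0], [60, 0]]   # four levels
    # Only two widths for four levels: the last width (3) is reused for
    # the remaining levels, per the rule described above.
    iso.linewidths = [1, 3]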
diff --git a/Packages/vcs/vcs/manageElements.py b/Packages/vcs/vcs/manageElements.py
index c61f762b5a99b472137ed2ef760c60de49574e06..751986a5e6f7865b1bc485ab887c2e3a82157311 100644
--- a/Packages/vcs/vcs/manageElements.py
+++ b/Packages/vcs/vcs/manageElements.py
@@ -67,21 +67,35 @@ def check_name_source(name, source, typ):
 
 def createtemplate(name=None, source='default'):
     """
-Function: createtemplate                  # Construct a new template
+    Create a new template given the name and an existing template to copy
+    the attributes from. If no existing template name is given, then the
+    default template will be used as the one from which the attributes are
+    copied.
+
+    If the name provided already exists, then an error will be returned. Template
+    names must be unique.
+
+    :Example:
+
+::
+
+    # create 'example1' template from 'default' template
+    con=vcs.createtemplate('example1')
+    # Show all the existing templates
+    vcs.listelements('template')
+    # create 'example2' from 'quick' template
+    con=vcs.createtemplate('example2','quick')
+
+
+:param name: The name of the created object
+:type name: str
+
+:param source: The object to inherit from
+:type source: a template or a string name of a template
+
+:returns: A template
+:rtype: vcs.template.P
 
-Description of Function:
-Create a new template given the the name and the existing template to copy
-the attributes from. If no existing template name is given, then the default
-template will be used as the template to which the attributes will be copied
-from.
-
-If the name provided already exists, then a error will be returned. Template
-names must be unique.
-
-Example of Use:
-con=vcs.createtemplate('example1') # create 'example1' template from 'default' template
-vcs.listelements('template')                       # Show all the existing templates
-con=vcs.createtemplate('example2','quick') # create 'example2' from 'quick' template
 """
     name, source = check_name_source(name, source, 'template')
 
@@ -90,22 +104,31 @@ con=vcs.createtemplate('example2','quick') # create 'example2' from 'quick' temp
 
 def gettemplate(Pt_name_src='default'):
     """
-Function: gettemplate                       # Construct a new template
+    VCS contains a list of predefined templates. This function will create a
+    template class object from an existing VCS template. If no template name
+    is given, then template 'default' will be used.
 
-Description of Function:
-VCS contains a list of predefined templates. This function will create a
-template class object from an existing VCS template. If no template name
-is given, then template 'default' will be used.
-
-Note, VCS does not allow the modification of `default' attribute
-sets. However, a `default' attribute set that has been copied under a
-different name can be modified. (See the createtemplate function.)
-
-Example of Use:
-vcs.listelements('template')                  # Show all the existing templates
-templt=vcs.gettemplate()              # templt instance of 'default' template
-templt2=vcs.gettemplate('quick')      # templt2 contains 'quick' template
-"""
+    Note, VCS does not allow the modification of `default' attribute
+    sets. However, a `default' attribute set that has been copied under a
+    different name can be modified. (See the createtemplate function.)
+
+    :Example:
+
+::
+
+    # Show all the existing templates
+    vcs.listelements('template')
+    # templt instance of 'default' template
+    templt=vcs.gettemplate()
+    # templt2 contains 'quick' template
+    templt2=vcs.gettemplate('quick')
+
+:param Pt_name_src: String name of an existing template VCS object
+:type Pt_name_src: str
+
+:returns: A VCS template object
+:rtype: vcs.template.P
+    """
     # Check to make sure the argument passed in is a STRING
     if not isinstance(Pt_name_src, str):
         raise vcsError('The argument must be a string.')
@@ -117,24 +140,34 @@ templt2=vcs.gettemplate('quick')      # templt2 contains 'quick' template
 
 def createprojection(name=None, source='default'):
     """
-Function: createprojection                # Construct a new projection method
+    Create a new projection method given the name and an existing
+    projection method to copy the attributes from. If no existing
+    projection method name is given, then the default projection
+    method will be used as the one from which the attributes are copied.
+
+    If the name provided already exists, then an error will be returned. Projection
+    method names must be unique.
+
+    :Example:
+
+::
+
+    vcs.show('projection')
+    p=vcs.createprojection('example1',)
+    vcs.show('projection')
+    p=vcs.createprojection('example2','quick')
+    vcs.show('projection')
+
+:param name: The name of the created object
+:type name: str
+
+:param source: The object to inherit from
+:type source: a projection or a string name of a projection
+
+:returns: A projection graphics method object
+:rtype: vcs.projection.Proj
 
-Description of Function:
-Create a new projection method given the the name and the existing
-projection method to copy the attributes from. If no existing
-projection method name is given, then the default projection
-method will be used as the projection method to which the attributes will
-be copied from.
-
-If the name provided already exists, then a error will be returned. Projection
-method names must be unique.
-
-Example of Use:
-vcs.show('projection')
-p=vcs.createprojection('example1',)
-vcs.show('projection')
-p=vcs.createprojection('example2','quick')
-vcs.show('projection')
 """
 
     name, source = check_name_source(name, source, 'projection')
@@ -143,24 +176,31 @@ vcs.show('projection')
 
 def getprojection(Proj_name_src='default'):
     """
-Function: getprojection                    # Construct a new projection method
+    VCS contains a list of graphics methods. This function will create a
+    projection class object from an existing VCS projection method. If
+    no projection name is given, then projection 'default' will be used.
 
-Description of Function:
-VCS contains a list of graphics methods. This function will create a
-projection class object from an existing VCS projection method. If
-no projection name is given, then projection 'default' will be used.
-
-Note, VCS does not allow the modification of `default' attribute
-sets. However, a `default' attribute set that has been copied under a
-different name can be modified. (See the createprojection function.)
-
-Example of Use:
-vcs.show('projection')                   # Show all the existing projection methods
-p=vcs.getprojection()                  # box instance of 'default' projection
-                                    # method
-p2=vcs.getprojection('quick')          # box2 instance of existing 'quick' projection
-                                    #         graphics method
-"""
+    Note, VCS does not allow the modification of `default' attribute
+    sets. However, a `default' attribute set that has been copied under a
+    different name can be modified. (See the createprojection function.)
+
+    :Example:
+
+::
+
+    # Show all the existing projection methods
+    vcs.show('projection')
+    # box instance of 'default' projection method
+    p=vcs.getprojection()
+    # box2 instance of existing 'quick' projection graphics method
+    p2=vcs.getprojection('quick')
+
+:param Proj_name_src: String name of an existing VCS projection object
+:type Proj_name_src: str
+
+:returns: A VCS projection object
+:rtype: vcs.projection.Proj
+    """
 
     # Check to make sure the argument passed in is a STRING
     if not isinstance(Proj_name_src, str):
@@ -185,25 +225,33 @@ Output:::
 %s
 :::
 
-Function: createboxfill                # Construct a new boxfill graphics method
+    Create a new boxfill graphics method given the name and an existing
+    boxfill graphics method to copy the attributes from. If no existing
+    boxfill graphics method name is given, then the default boxfill graphics
+    method will be used as the one from which the attributes are copied.
 
-Description of Function:
-Create a new boxfill graphics method given the the name and the existing
-boxfill graphics method to copy the attributes from. If no existing
-boxfill graphics method name is given, then the default boxfill graphics
-method will be used as the graphics method to which the attributes will
-be copied from.
-
-If the name provided already exists, then a error will be returned. Graphics
-method names must be unique.
-
-Example of Use:
-vcs.show('boxfill')
-box=vcs.createboxfill('example1',)
-vcs.show('boxfill')
-box=vcs.createboxfill('example2','quick')
-vcs.show('boxfill')
+    If the name provided already exists, then an error will be returned. Graphics
+    method names must be unique.
+
+    :Example:
+
+::
+
+    vcs.show('boxfill')
+    box=vcs.createboxfill('example1',)
+    vcs.show('boxfill')
+    box=vcs.createboxfill('example2','quick')
+    vcs.show('boxfill')
 
+:param name: The name of the created object
+:type name: str
+
+:param source: The object to inherit from
+:type source: a boxfill or a string name of a boxfill
+
+:return: A boxfill graphics method object
+:rtype: vcs.boxfill.Gfb
 """
 
     name, source = check_name_source(name, source, 'boxfill')
@@ -225,31 +273,28 @@ Input:::
 Output:::
 %s
 :::
-Function: getboxfill                        # Construct a new boxfill graphics method
-
-Description of Function:
-VCS contains a list of graphics methods. This function will create a
-boxfill class object from an existing VCS boxfill graphics method. If
-no boxfill name is given, then boxfill 'default' will be used.
-
-Note, VCS does not allow the modification of `default' attribute
-sets. However, a `default' attribute set that has been copied under a
-different name can be modified. (See the createboxfill function.)
-
-Example of Use:
-vcs.show('boxfill')                   # Show all the existing boxfill graphics methods
-box=vcs.getboxfill()                  # box instance of 'default' boxfill graphics
-                                    # method
-box2=vcs.getboxfill('quick')          # box2 instance of existing 'quick' boxfill
-                                    #         graphics method
-######################################################################################################################
-###########################################                            ###############################################
-########################################## End getboxfill Description ################################################
-#########################################                            #################################################
-######################################################################################################################
-
+    VCS contains a list of graphics methods. This function will create a
+    boxfill class object from an existing VCS boxfill graphics method. If
+    no boxfill name is given, then boxfill 'default' will be used.
+
+    Note, VCS does not allow the modification of `default' attribute
+    sets. However, a `default' attribute set that has been copied under a
+    different name can be modified. (See the createboxfill function.)
+
+    :Example:
+
+::
+
+    # Show all the existing boxfill graphics methods
+    vcs.show('boxfill')
+    # box instance of 'default' boxfill graphics method
+    box=vcs.getboxfill()
+    # box2 instance of existing 'quick' boxfill graphics method
+    box2=vcs.getboxfill('quick')
+
+:param Gfb_name_src: String name of an existing boxfill VCS object
+:type Gfb_name_src: str
+
+:return: A pre-existing boxfill graphics method
+:rtype: vcs.boxfill.Gfb
 """
-
     # Check to make sure the argument passed in is a STRING
     if not isinstance(Gfb_name_src, str):
         raise vcsError('The argument must be a string.')
@@ -263,24 +308,34 @@ getboxfill.__doc__ = getboxfill.__doc__ % (
 
 def createtaylordiagram(name=None, source='default'):
     """
-Function: createtaylordiagram  # Construct a new taylordiagram graphics method
+    Create a new taylordiagram graphics method given the name and an existing
+    taylordiagram graphics method to copy the attributes from. If no existing
+    taylordiagram graphics method name is given, then the default taylordiagram
+    graphics method will be used as the one from which the attributes are copied.
+
+    If the name provided already exists, then an error will be returned. Graphics
+    method names must be unique.
+
+    :Example:
+
+::
+
+    vcs.show('taylordiagram')
+    td=vcs.createtaylordiagram('example1',)
+    vcs.show('taylordiagram')
+    td=vcs.createtaylordiagram('example2','quick')
+    vcs.show('taylordiagram')
+
+:param name: The name of the created object
+:type name: str
+
+:param source: The object to inherit from
+:type source: a taylordiagram or a string name of a taylordiagram
+
+:returns: A taylordiagram graphics method object
+:rtype: vcs.taylor.Gtd
 
-Description of Function:
-Create a new taylordiagram graphics method given the the name and the existing
-taylordiagram graphics method to copy the attributes from. If no existing
-taylordiagram graphics method name is given, then the default taylordiagram graphics
-method will be used as the graphics method to which the attributes will
-be copied from.
-
-If the name provided already exists, then a error will be returned. Graphics
-method names must be unique.
-
-Example of Use:
-vcs.show('taylordiagram')
-td=vcs.createtaylordiagram('example1',)
-vcs.show('taylordiagram')
-td=vcs.createtaylordiagram('example2','quick')
-vcs.show('taylordiagram')
 """
 
     name, source = check_name_source(name, source, 'taylordiagram')
@@ -300,24 +355,31 @@ vcs.show('taylordiagram')
 
 def gettaylordiagram(Gtd_name_src='default'):
     """
-Function: gettaylordiagram                     # Construct a new taylordiagram graphics method
+    VCS contains a list of graphics methods. This function will create a
+    taylordiagram class object from an existing VCS taylordiagram graphics method. If
+    no taylordiagram name is given, then taylordiagram 'default' will be used.
 
-Description of Function:
-VCS contains a list of graphics methods. This function will create a
-taylordiagram class object from an existing VCS taylordiagram graphics method. If
-no taylordiagram name is given, then taylordiagram 'default' will be used.
-
-Note, VCS does not allow the modification of `default' attribute
-sets. However, a `default' attribute set that has been copied under a
-different name can be modified. (See the createboxfill function.)
-
-Example of Use:
-vcs.show('taylordiagram')                    # Show all the existing taylordiagram graphics methods
-td=vcs.gettaylordiagram()                    # td instance of 'default' taylordiagram graphics
-                                           # method
-td2=vcs.gettaylordiagram('default')          # td2 instance of existing 'default' taylordiagram
-                                           #         graphics method
-                                    """
+    Note, VCS does not allow the modification of `default' attribute
+    sets. However, a `default' attribute set that has been copied under a
+    different name can be modified. (See the createboxfill function.)
+
+    :Example:
+
+::
+
+    # Show all the existing taylordiagram graphics methods
+    vcs.show('taylordiagram')
+    # td instance of 'default' taylordiagram graphics method
+    td=vcs.gettaylordiagram()
+    # td2 instance of existing 'default' taylordiagram graphics method
+    td2=vcs.gettaylordiagram('default')
+
+:param Gtd_name_src: String name of an existing taylordiagram VCS object
+:type Gtd_name_src: str
+
+:returns: A taylordiagram VCS object
+:rtype: vcs.taylor.Gtd
+    """
 
     # Check to make sure the argument passed in is a STRING
     if not isinstance(Gtd_name_src, str):
@@ -333,24 +395,34 @@ td2=vcs.gettaylordiagram('default')          # td2 instance of existing 'default
 
 def createmeshfill(name=None, source='default'):
     """
-Function: createmeshfill                # Construct a new meshfill graphics method
+    Create a new meshfill graphics method given the name and an existing
+    meshfill graphics method to copy attributes from. If no existing
+    meshfill graphics method name is given, then the default meshfill
+    graphics method will be used as the source of the copied attributes.
+
+    If the name provided already exists, then an error will be returned. Graphics
+    method names must be unique.
+
+    :Example:
+
+::
+
+    vcs.show('meshfill')
+    mesh=vcs.createmeshfill('example1')
+    vcs.show('meshfill')
+    mesh=vcs.createmeshfill('example2','quick')
+    vcs.show('meshfill')
+
+:param name: The name of the created object
+:type name: str
+
+:param source: The object to inherit from
+:type source: a meshfill or a string name of a meshfill
+
+:returns: A meshfill graphics method object
+:rtype: vcs.meshfill.Gfm
 
-Description of Function:
-Create a new meshfill graphics method given the the name and the existing
-meshfill graphics method to copy the attributes from. If no existing
-meshfill graphics method name is given, then the default meshfill graphics
-method will be used as the graphics method to which the attributes will
-be copied from.
-
-If the name provided already exists, then a error will be returned. Graphics
-method names must be unique.
-
-Example of Use:
-vcs.show('meshfill')
-mesh=vcs.createmeshfill('example1',)
-vcs.show('meshfill')
-mesh=vcs.createmeshfill('example2','quick')
-vcs.show('meshfill')
 """
     name, source = check_name_source(name, source, 'meshfill')
     return meshfill.Gfm(name, source)
@@ -358,25 +430,32 @@ vcs.show('meshfill')
 
 def getmeshfill(Gfm_name_src='default'):
     """
-Function: getmeshfill                        # Construct a new meshfill graphics method
+    VCS contains a list of graphics methods. This function will create a
+    meshfill class object from an existing VCS meshfill graphics method. If
+    no meshfill name is given, then meshfill 'default' will be used.
 
-Description of Function:
-VCS contains a list of graphics methods. This function will create a
-meshfill class object from an existing VCS meshfill graphics method. If
-no meshfill name is given, then meshfill 'default' will be used.
+    Note, VCS does not allow the modification of `default' attribute
+    sets. However, a `default' attribute set that has been copied under a
+    different name can be modified. (See the createmeshfill function.)
 
-Note, VCS does not allow the modification of `default' attribute
-sets. However, a `default' attribute set that has been copied under a
-different name can be modified. (See the createmeshfill function.)
+    :Example:
 
-Example of Use:
-a=vcs.init()
-a.show('meshfill')                   # Show all the existing meshfill graphics methods
-mesh=a.getmeshfill()                  # mesh instance of 'default' meshfill graphics
-                                    # method
-mesh2=a.getmeshfill('quick')          # mesh2 instance of existing 'quick' meshfill
-                                    #         graphics method
-"""
+::
+
+    a=vcs.init()
+    # Show all the existing meshfill graphics methods
+    a.show('meshfill')
+    # mesh instance of 'default' meshfill graphics method
+    mesh=a.getmeshfill()
+    # mesh2 instance of existing 'quick' meshfill graphics method
+    mesh2=a.getmeshfill('quick')
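+    # Unlike 'default', the retrieved copy may be customized
+    # (illustrative values; levels is a standard meshfill attribute):
+    mesh2.levels=[0,25,50,75,100]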
+
+:param Gfm_name_src: String name of an existing meshfill VCS object
+:type Gfm_name_src: str
+
+:returns: A meshfill VCS object
+:rtype: vcs.meshfill.Gfm
+    """
 
     # Check to make sure the argument passed in is a STRING
     if not isinstance(Gfm_name_src, str):
@@ -402,24 +481,33 @@ Output:::
 %s
 :::
 
-Function: createisofill  # Construct a new isofill graphics method
+    Create a new isofill graphics method given the name and an existing
+    isofill graphics method to copy attributes from. If no existing
+    isofill graphics method name is given, then the default isofill
+    graphics method will be used as the source of the copied attributes.
 
-Description of Function:
-Create a new isofill graphics method given the the name and the existing
-isofill graphics method to copy the attributes from. If no existing
-isofill graphics method name is given, then the default isofill graphics
-method will be used as the graphics method to which the attributes will
-be copied from.
-
-If the name provided already exists, then a error will be returned. Graphics
-method names must be unique.
-
-Example of Use:
-vcs.show('isofill')
-iso=vcs.createisofill('example1',)
-vcs.show('isofill')
-iso=vcs.createisofill('example2','quick')
-vcs.show('isofill')
+    If the name provided already exists, then an error will be returned. Graphics
+    method names must be unique.
+
+    :Example:
+
+::
+
+    vcs.show('isofill')
+    iso=vcs.createisofill('example1')
+    vcs.show('isofill')
+    iso=vcs.createisofill('example2','quick')
+    vcs.show('isofill')
+
+:param name: The name of the created object
+:type name: str
+
+:param source: The object to inherit from
+:type source: an isofill object, or string name of an isofill object
+
+:returns: An isofill graphics method
+:rtype: vcs.isofill.Gfi
 
 """
 
@@ -443,24 +531,30 @@ Output:::
 %s
 :::
 
-Function: getisofill          Construct a new isofill graphics method
+    VCS contains a list of graphics methods. This function will create an
+    isofill class object from an existing VCS isofill graphics method. If
+    no isofill name is given, then isofill 'default' will be used.
 
-Description of Function:
-VCS contains a list of graphics methods. This function will create a
-isofill class object from an existing VCS isofill graphics method. If
-no isofill name is given, then isofill 'default' will be used.
+    Note, VCS does not allow the modification of `default' attribute
+    sets. However, a `default' attribute set that has been copied under a
+    different name can be modified. (See the createisofill function.)
+
+    :Example:
+
+::
 
-Note, VCS does not allow the modification of `default' attribute
-sets. However, a `default' attribute set that has been copied under a
-different name can be modified. (See the createisofill function.)
+    # Show all the existing isofill graphics methods
+    vcs.show('isofill')
+    # iso instance of 'default' isofill graphics method
+    iso=vcs.getisofill()
+    # iso2 instance of existing 'quick' isofill graphics method
+    iso2=vcs.getisofill('quick')
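+    # Non-default methods can be modified after retrieval (illustrative
+    # values for the levels and fillareacolors attributes):
+    iso2.levels=[0,10,20,30,40]
+    iso2.fillareacolors=[240,241,242,243]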
 
-Example of Use:
-vcs.show('isofill')                   # Show all the existing isofill graphics methods
-iso=vcs.getisofill()                  # iso instance of 'default' isofill graphics
-                                    #       method
-iso2=vcs.getisofill('quick')          # iso2 instance of existing 'quick' isofill
-                                    #       graphics method
+:param Gfi_name_src: String name of an existing isofill VCS object
+:type Gfi_name_src: str
 
+:returns: The specified isofill VCS object
+:rtype: vcs.isofill.Gfi
 """
 
     # Check to make sure the argument passed in is a STRING
@@ -488,26 +582,31 @@ Output:::
 %s
 :::
 
-Function: createisoline                # Construct a new isoline graphics method
+    Create a new isoline graphics method given the name and an existing
+    isoline graphics method to copy attributes from. If no existing
+    isoline graphics method name is given, then the default isoline
+    graphics method will be used as the source of the copied attributes.
 
-Description of Function:
-Create a new isoline graphics method given the the name and the existing
-isoline graphics method to copy the attributes from. If no existing
-isoline graphics method name is given, then the default isoline graphics
-method will be used as the graphics method to which the attributes will
-be copied from.
+    If the name provided already exists, then an error will be returned. Graphics
+    method names must be unique.
+
+    :Example:
+
+::
 
-If the name provided already exists, then a error will be returned. Graphics
-method names must be unique.
+    vcs.show('isoline')
+    iso=vcs.createisoline('example1')
+    vcs.show('isoline')
+    iso=vcs.createisoline('example2','quick')
+    vcs.show('isoline')
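+    # Per-level styling can then be assigned (illustrative; line and
+    # linecolors are standard isoline attributes):
+    iso.line=['dash','solid']
+    iso.linecolors=[242,243]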
 
-Example of Use:
+:param name: The name of the created object
+:type name: str
 
-vcs.show('isoline')
-iso=vcs.createisoline('example1',)
-vcs.show('isoline')
-iso=vcs.createisoline('example2','quick')
-vcs.show('isoline')
+:param source: The object to inherit from
+:type source: an isoline object, or string name of an isoline object
 
+:returns: An isoline graphics method object
+:rtype: vcs.isoline.Gi
 """
 
     name, source = check_name_source(name, source, 'isoline')
@@ -529,30 +628,31 @@ Input:::
 Output:::
 %s
 :::
+    VCS contains a list of graphics methods. This function will create an
+    isoline class object from an existing VCS isoline graphics method. If
+    no isoline name is given, then isoline 'default' will be used.
 
-Function: getisoline                        # Construct a new isoline graphics method
+    Note, VCS does not allow the modification of `default' attribute
+    sets. However, a `default' attribute set that has been copied under a
+    different name can be modified. (See the createisoline function.)
 
-Description of Function:
-VCS contains a list of graphics methods. This function will create a
-isoline class object from an existing VCS isoline graphics method. If
-no isoline name is given, then isoline 'default' will be used.
-
-Note, VCS does not allow the modification of `default' attribute
-sets. However, a `default' attribute set that has been copied under a
-different name can be modified. (See the createisoline function.)
-
-Example of Use:
-vcs.show('isoline')                   # Show all the existing isoline graphics methods
-iso=vcs.getisoline()                  # iso instance of 'default' isoline graphics
-                                    #       method
-iso2=vcs.getisoline('quick')          # iso2 instance of existing 'quick' isoline
-gm.linewidth=0
-                                    #       graphics method
-######################################################################################################################
-###########################################                            ###############################################
-########################################## End getisoline Description ################################################
-#########################################                            #################################################
-######################################################################################################################
+    :Example:
+
+::
+
+    # Show all the existing isoline graphics methods
+    vcs.show('isoline')
+    # iso instance of 'default' isoline graphics method
+    iso=vcs.getisoline()
+    # iso2 instance of existing 'quick' isoline graphics method
+    iso2=vcs.getisoline('quick')
+    # A retrieved non-default isoline can be modified in place
+    # (illustrative; linewidths is a per-level list attribute):
+    iso2.linewidths=[2]
+
+:param Gi_name_src: String name of an existing isoline VCS object
+:type Gi_name_src: str
+
+:returns: The requested isoline VCS object
+:rtype: vcs.isoline.Gi
 """
 
     # Check to make sure the argument passed in is a STRING
@@ -593,33 +693,35 @@ Input:::
 Output:::
 %s
 :::
+    Create a new Xyvsy graphics method given the name and an existing
+    Xyvsy graphics method to copy attributes from. If no existing
+    Xyvsy graphics method name is given, then the default Xyvsy
+    graphics method will be used as the source of the copied attributes.
 
-Function: createxyvsy                  # Construct a new Xyvsy graphics method
+    If the name provided already exists, then an error will be returned. Graphics
+    method names must be unique.
 
-Description of Function:
-Create a new Xyvsy graphics method given the the name and the existing
-Xyvsy graphics method to copy the attributes from. If no existing
-Xyvsy graphics method name is given, then the default Xyvsy graphics
-method will be used as the graphics method to which the attributes will
-be copied from.
+    :Example:
 
-If the name provided already exists, then a error will be returned. Graphics
-method names must be unique.
+::
 
-Example of Use:
+    a=vcs.init()
+    vcs.show('xyvsy')
+    xyy=vcs.createxyvsy('example1')
+    vcs.show('xyvsy')
+    xyy=vcs.createxyvsy('example2','quick')
+    vcs.show('xyvsy')
 
-a=vcs.init()
-vcs.show('xyvsy')
-xyy=vcs.createxyvsy('example1',)
-vcs.show('xyvsy')
-xyy=vcs.createxyvsy('example2','quick')
-vcs.show('xyvsy')
-
-#######################################################################################################################
-###########################################                             ###############################################
-########################################## End createxyvsy Description ################################################
-#########################################                             #################################################
-#######################################################################################################################
+
+:param name: The name of the created object
+:type name: str
+
+:param source: The object to inherit from
+:type source: an xyvsy or a string name of an xyvsy
+
+:returns: An Xyvsy graphics method object
+:rtype: vcs.unified1D.G1d
 
 """
     try:
@@ -652,29 +754,31 @@ Output:::
 %s
 :::
 
-Function: getxyvsy        # Construct a new Xyvsy graphics method
+    VCS contains a list of graphics methods. This function will create an
+    Xyvsy class object from an existing VCS Xyvsy graphics method. If
+    no Xyvsy name is given, then Xyvsy 'default' will be used.
 
-Description of Function:
-VCS contains a list of graphics methods. This function will create a
-Xyvsy class object from an existing VCS Xyvsy graphics method. If
-no Xyvsy name is given, then Xyvsy 'default' will be used.
+    Note, VCS does not allow the modification of `default' attribute
+    sets. However, a `default' attribute set that has been copied under a
+    different name can be modified. (See the createxyvsy function.)
 
-Note, VCS does not allow the modification of `default' attribute
-sets. However, a `default' attribute set that has been copied under a
-different name can be modified. (See the createxyvsy function.)
+    :Example:
 
-Example of Use:
-a=vcs.init()
-vcs.show('xyvsy')                     # Show all the existing Xyvsy graphics methods
-xyy=vcs.getxyvsy()                    # xyy instance of 'default' Xyvsy graphics
-                                    #       method
-xyy2=vcs.getxyvsy('quick')            # xyy2 instance of existing 'quick' Xyvsy
-                                    #       graphics method
-####################################################################################################################
-###########################################                          ###############################################
-########################################## End getxyvsy Description ################################################
-#########################################                          #################################################
-####################################################################################################################
+::
+
+    a=vcs.init()
+    # Show all the existing Xyvsy graphics methods
+    vcs.show('xyvsy')
+    # xyy instance of 'default' Xyvsy graphics method
+    xyy=vcs.getxyvsy('default_xyvsy_')
+    # xyy2 instance of existing 'quick' Xyvsy graphics method
+    xyy2=vcs.getxyvsy('quick')
+
+:param GXy_name_src: String name of an existing Xyvsy graphics method
+:type GXy_name_src: str
+
+:returns: An Xyvsy graphics method object
+:rtype: vcs.unified1D.G1d
 
 """
     gm = vcs.get1d(GXy_name_src)
@@ -699,33 +803,35 @@ Input:::
 Output:::
 %s
 :::
+    Create a new Yxvsx graphics method given the name and an existing
+    Yxvsx graphics method to copy attributes from. If no existing
+    Yxvsx graphics method name is given, then the default Yxvsx
+    graphics method will be used as the source of the copied attributes.
 
-Function: createyxvsx                  # Construct a new Yxvsx graphics method
+    If the name provided already exists, then an error will be returned. Graphics
+    method names must be unique.
 
-Description of Function:
-Create a new Yxvsx graphics method given the the name and the existing
-Yxvsx graphics method to copy the attributes from. If no existing
-Yxvsx graphics method name is given, then the default Yxvsx graphics
-method will be used as the graphics method to which the attributes will
-be copied from.
+    :Example:
 
-If the name provided already exists, then a error will be returned. Graphics
-method names must be unique.
+::
 
-Example of Use:
+    a=vcs.init()
+    vcs.show('yxvsx')
+    yxx=vcs.createyxvsx('example1')
+    vcs.show('yxvsx')
+    yxx=vcs.createyxvsx('example2','quick')
+    vcs.show('yxvsx')
 
-a=vcs.init()
-vcs.show('yxvsx')
-yxx=vcs.createyxvsx('example1',)
-vcs.show('yxvsx')
-yxx=vcs.createyxvsx('example2','quick')
-vcs.show('yxvsx')
-
-#######################################################################################################################
-###########################################                             ###############################################
-########################################## End createyxvsx Description ################################################
-#########################################                             #################################################
-#######################################################################################################################
+
+:param name: The name of the created object
+:type name: str
+
+:param source: The object to inherit from
+:type source: a yxvsx or a string name of a yxvsx
+
+:returns: A Yxvsx graphics method object
+:rtype: vcs.unified1D.G1d
 
 """
     try:
@@ -757,30 +863,31 @@ Output:::
 %s
 :::
 
-Function: getyxvsx                     # Construct a new Yxvsx graphics method
+    VCS contains a list of graphics methods. This function will create a
+    Yxvsx class object from an existing VCS Yxvsx graphics method. If
+    no Yxvsx name is given, then Yxvsx 'default' will be used.
 
-Description of Function:
-VCS contains a list of graphics methods. This function will create a
-Yxvsx class object from an existing VCS Yxvsx graphics method. If
-no Yxvsx name is given, then Yxvsx 'default' will be used.
+    Note, VCS does not allow the modification of `default' attribute
+    sets. However, a `default' attribute set that has been copied under a
+    different name can be modified. (See the createyxvsx function.)
 
-Note, VCS does not allow the modification of `default' attribute
-sets. However, a `default' attribute set that has been copied under a
-different name can be modified. (See the createyxvsx function.)
+    :Example:
 
-Example of Use:
-a=vcs.init()
-vcs.show('yxvsx')                     # Show all the existing Yxvsx graphics methods
-yxx=vcs.getyxvsx()                    # yxx instance of 'default' Yxvsx graphics
-                                    #       method
-yxx2=vcs.getyxvsx('quick')            # yxx2 instance of existing 'quick' Yxvsx
-                                    #       graphics method
-####################################################################################################################
-###########################################                          ###############################################
-########################################## End getyxvsx Description ################################################
-#########################################                          #################################################
-####################################################################################################################
+::
+
+    a=vcs.init()
+    # Show all the existing Yxvsx graphics methods
+    vcs.show('yxvsx')
+    # yxx instance of 'default' Yxvsx graphics method
+    yxx=vcs.getyxvsx()
+    # yxx2 instance of existing 'quick' Yxvsx graphics method
+    yxx2=vcs.getyxvsx('quick')
 
+:param GYx_name_src: String name of an existing Yxvsx graphics method
+:type GYx_name_src: str
+
+:returns: A Yxvsx graphics method object
+:rtype: vcs.unified1D.G1d
 """
     gm = vcs.get1d(GYx_name_src)
     if gm.g_type != "yxvsx":
@@ -804,31 +911,35 @@ Output:::
 %s
 :::
 
-Function: createxvsy                      # Construct a new XvsY graphics method
+    Create a new XvsY graphics method given the name and an existing
+    XvsY graphics method to copy attributes from. If no existing
+    XvsY graphics method name is given, then the default XvsY
+    graphics method will be used as the source of the copied attributes.
 
-Description of Function:
-Create a new XvsY graphics method given the the name and the existing
-XvsY graphics method to copy the attributes from. If no existing
-XvsY graphics method name is given, then the default XvsY graphics
-method will be used as the graphics method to which the attributes will
-be copied from.
+    If the name provided already exists, then an error will be returned. Graphics
+    method names must be unique.
 
-If the name provided already exists, then a error will be returned. Graphics
-method names must be unique.
+    :Example:
 
-Example of Use:
-a=vcs.init()
-vcs.show('xvsy')
-xy=vcs.createxvsy('example1',)
-vcs.show('xvsy')
-xy=vcs.createxvsy('example2','quick')
-vcs.show('xvsy')
-
-######################################################################################################################
-###########################################                            ###############################################
-########################################## End createxvsy Description ################################################
-#########################################                            #################################################
-######################################################################################################################
+::
+
+    a=vcs.init()
+    vcs.show('xvsy')
+    xy=vcs.createxvsy('example1')
+    vcs.show('xvsy')
+    xy=vcs.createxvsy('example2','quick')
+    vcs.show('xvsy')
+
+
+:param name: The name of the created object
+:type name: str
+
+:param source: The object to inherit from
+:type source: an xvsy or a string name of an xvsy
+
+:returns: An XvsY graphics method object
+:rtype: vcs.unified1D.G1d
 
 """
     try:
@@ -860,31 +971,31 @@ Output:::
 %s
 :::
 
-Function: getxvsy                   # Construct a new XvsY graphics method
+    VCS contains a list of graphics methods. This function will create an
+    XvsY class object from an existing VCS XvsY graphics method. If
+    no XvsY name is given, then XvsY 'default' will be used.
 
-Description of Function:
-VCS contains a list of graphics methods. This function will create a
-XvsY class object from an existing VCS XvsY graphics method. If
-no XvsY name is given, then XvsY 'default' will be used.
+    Note, VCS does not allow the modification of `default' attribute
+    sets. However, a `default' attribute set that has been copied under a
+    different name can be modified. (See the createxvsy function.)
 
-Note, VCS does not allow the modification of `default' attribute
-sets. However, a `default' attribute set that has been copied under a
-different name can be modified. (See the createxvsy function.)
+    :Example:
 
-Example of Use:
-a=vcs.init()
-vcs.show('xvsy')                      # Show all the existing XvsY graphics methods
-xy=vcs.getxvsy()                      # xy instance of 'default' XvsY graphics
-                                    #       method
-xy2=vcs.getxvsy('quick')              # xy2 instance of existing 'quick' XvsY
-                                    #       graphics method
-
-###################################################################################################################
-###########################################                         ###############################################
-########################################## End getxvsy Description ################################################
-#########################################                         #################################################
-###################################################################################################################
+::
+
+    a=vcs.init()
+    # Show all the existing XvsY graphics methods
+    vcs.show('xvsy')
+    # xy instance of 'default' XvsY graphics method
+    xy=vcs.getxvsy()
+    # xy2 instance of existing 'quick' XvsY graphics method
+    xy2=vcs.getxvsy('quick')
+
+:param GXY_name_src: String name of a 1d graphics method
+:type GXY_name_src: str
 
+:returns: A XvsY graphics method object
+:rtype: vcs.unified1D.G1d
 """
     gm = vcs.get1d(GXY_name_src)
     # Deliberately yxvsx here; xvsy is just an alias
@@ -897,51 +1008,69 @@ getxvsy.__doc__ = getxvsy.__doc__ % (
 
 def createvector(name=None, source='default'):
     """
-Function: createvector                # Construct a new vector graphics method
+    Create a new vector graphics method given the name and an existing
+    vector graphics method to copy attributes from. If no existing
+    vector graphics method name is given, then the default vector
+    graphics method will be used as the source of the copied attributes.
 
-Description of Function:
-Create a new vector graphics method given the the name and the existing
-vector graphics method to copy the attributes from. If no existing
-vector graphics method name is given, then the default vector graphics
-method will be used as the graphics method to which the attributes will
-be copied from.
+    If the name provided already exists, then an error will be returned. Graphics
+    method names must be unique.
 
-If the name provided already exists, then a error will be returned. Graphics
-method names must be unique.
+    :Example:
 
-Example of Use:
-a=vcs.init()
-vcs.show('vector')
-vec=vcs.createvector('example1',)
-vcs.show('vector')
-vec=vcs.createvector('example2','quick')
-vcs.show('vector')
-"""
+::
+
+    a=vcs.init()
+    vcs.show('vector')
+    vec=vcs.createvector('example1')
+    vcs.show('vector')
+    vec=vcs.createvector('example2','quick')
+    vcs.show('vector')
+
+
+:param name: The name of the created object
+:type name: str
+
+:param source: The object to inherit from
+:type source: a vector or a string name of a vector
+
+:returns: A vector graphics method object
+:rtype: vcs.vector.Gv
+
+    """
     name, source = check_name_source(name, source, 'vector')
     return vector.Gv(name, source)
 
 
 def getvector(Gv_name_src='default'):
     """
-Function: getvector                   # Construct a new vector graphics method
+    VCS contains a list of graphics methods. This function will create a
+    vector class object from an existing VCS vector graphics method. If
+    no vector name is given, then vector 'default' will be used.
 
-Description of Function:
-VCS contains a list of graphics methods. This function will create a
-vector class object from an existing VCS vector graphics method. If
-no vector name is given, then vector 'default' will be used.
+    Note, VCS does not allow the modification of `default' attribute
+    sets. However, a `default' attribute set that has been copied under a
+    different name can be modified. (See the createvector function.)
 
-Note, VCS does not allow the modification of `default' attribute
-sets. However, a `default' attribute set that has been copied under a
-different name can be modified. (See the createvector function.)
+    :Example:
 
-Example of Use:
-a=vcs.init()
-vcs.show('vector')                   # Show all the existing vector graphics methods
-vec=vcs.getvector()                  # vec instance of 'default' vector graphics
-                                    #       method
-vec2=vcs.getvector('quick')          # vec2 instance of existing 'quick' vector
-                                    #       graphics method
-"""
+::
+
+    a=vcs.init()
+    # Show all the existing vector graphics methods
+    vcs.show('vector')
+    # vec instance of 'default' vector graphics method
+    vec=vcs.getvector()
+    # vec2 instance of existing 'quick' vector graphics method
+    vec2=vcs.getvector('quick')
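+    # Retrieved non-default methods are modifiable; for instance the
+    # arrow scaling can be adjusted (illustrative value):
+    vec2.scale=2.0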
+
+:param Gv_name_src: String name of an existing vector VCS object
+:type Gv_name_src: str
+
+:returns: A vector graphics method object
+:rtype: vcs.vector.Gv
+    """
 
     # Check to make sure the argument passed in is a STRING
     if not isinstance(Gv_name_src, str):
@@ -965,25 +1094,34 @@ Output:::
 %s
 :::
 
-Function: createscatter                # Construct a new scatter graphics method
+    Create a new scatter graphics method given the name and an existing
+    scatter graphics method to copy attributes from. If no existing
+    scatter graphics method name is given, then the default scatter
+    graphics method will be used as the source of the copied attributes.
 
-Description of Function:
-Create a new scatter graphics method given the the name and the existing
-scatter graphics method to copy the attributes from. If no existing
-scatter graphics method name is given, then the default scatter graphics
-method will be used as the graphics method to which the attributes will
-be copied from.
+    If the name provided already exists, then an error will be returned. Graphics
+    method names must be unique.
 
-If the name provided already exists, then a error will be returned. Graphics
-method names must be unique.
+    :Example:
 
-Example of Use:
-a=vcs.init()
-vcs.show('scatter')
-sct=vcs.createscatter('example1',)
-vcs.show('scatter')
-sct=vcs.createscatter('example2','quick')
-vcs.show('scatter')
+::
+
+    a=vcs.init()
+    vcs.show('scatter')
+    sct=vcs.createscatter('example1')
+    vcs.show('scatter')
+    sct=vcs.createscatter('example2','quick')
+    vcs.show('scatter')
+
+:param name: The name of the created object
+:type name: str
+
+:param source: The object to inherit from
+:type source: a scatter or a string name of a scatter
+
+:returns: A scatter graphics method object
+:rtype: vcs.unified1D.G1d
 
 """
     try:
@@ -1016,31 +1154,31 @@ Output:::
 %s
 :::
 
-Function: getscatter                   # Construct a new scatter graphics method
+    VCS contains a list of graphics methods. This function will create a
+    scatter class object from an existing VCS scatter graphics method. If
+    no scatter name is given, then scatter 'default' will be used.
 
-Description of Function:
-VCS contains a list of graphics methods. This function will create a
-scatter class object from an existing VCS scatter graphics method. If
-no scatter name is given, then scatter 'default' will be used.
+    Note, VCS does not allow the modification of `default' attribute
+    sets. However, a `default' attribute set that has been copied under a
+    different name can be modified. (See the createscatter function.)
 
-Note, VCS does not allow the modification of `default' attribute
-sets. However, a `default' attribute set that has been copied under a
-different name can be modified. (See the createscatter function.)
+    :Example:
 
-Example of Use:
-a=vcs.init()
-vcs.show('scatter')                   # Show all the existing scatter graphics methods
-sct=vcs.getscatter()                  # sct instance of 'default' scatter graphics
-                                    #       method
-sct2=vcs.getscatter('quick')          # sct2 instance of existing 'quick' scatter
-                                    #       graphics method
-
-######################################################################################################################
-###########################################                            ###############################################
-########################################## End getscatter Description ################################################
-#########################################                            #################################################
-######################################################################################################################
+::
+
+    a=vcs.init()
+    # Show all the existing scatter graphics methods
+    vcs.show('scatter')
+    # sct instance of 'default' scatter graphics method
+    sct=vcs.getscatter('default_scatter_')
+    # sct2 instance of existing 'quick' scatter graphics method
+    sct2=vcs.getscatter('quick')
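+    # The retrieved copy can then be customized (illustrative; marker
+    # and markersize are standard 1D attributes):
+    sct2.marker='circle'
+    sct2.markersize=10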
+
+:param GSp_name_src: String name of an existing scatter VCS object.
+:type GSp_name_src: str
 
+:returns: A scatter graphics method object
+:rtype: vcs.unified1D.G1d
 """
     gm = vcs.get1d(GSp_name_src)
     if gm.g_type != "scatter":
@@ -1055,31 +1193,70 @@ def createline(name=None, source='default', ltype=None,
                viewport=None, worldcoordinate=None,
                x=None, y=None, projection=None):
     """
-Function: createline                       # Construct a new line secondary method
+    Create a new line secondary method given the name and an existing
+    line secondary method to copy attributes from. If no existing line
+    secondary method name is given, then the default line secondary
+    method will be used as the source of the copied attributes.
 
-Description of Function:
-Create a new line secondary method given the the name and the existing
-line secondary method to copy the attributes from. If no existing line
-secondary method name is given, then the default line secondary method
-will be used as the secondary method to which the attributes will be
-copied from.
+    If the name provided already exists, then an error will be returned.
+    Secondary method names must be unique.
 
-If the name provided already exists, then a error will be returned.
-Secondary method names must be unique.
+    :Example:
 
-Example of Use:
-a=vcs.init()
-vcs.show('line')
-ln=vcs.createline('example1',)
-vcs.show('line')
-ln=vcs.createline('example2','black')
-vcs.show('line')
-ln2=vcs.createline(name='new', name_src='red',ltype='dash', width=2,
-              color=242, priority=1, viewport=[0, 2.0, 0, 2.0],
-              worldcoordinate=[0,100, 0,50]
-              x=[0,20,40,60,80,100],
-              y=[0,10,20,30,40,50] )      # Create instance of line object 'red'
-vcs.line(ln2)                      # Plot using specified line object
+::
+
+    a=vcs.init()
+    vcs.show('line')
+    ln=vcs.createline('example1')
+    vcs.show('line')
+    ln=vcs.createline('example2','black')
+    vcs.show('line')
+    # Create instance of line object 'red'
+    ln2=vcs.createline(name='new', source='red', ltype='dash', width=2,
+                  color=242, priority=1, viewport=[0, 1.0, 0, 1.0],
+                  worldcoordinate=[0,100, 0,50],
+                  x=[0,20,40,60,80,100],
+                  y=[0,10,20,30,40,50] )
+    # Plot using specified line object
+    vcs.line(ln2)
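+    # A projection name may also be given when x/y are spherical
+    # coordinates (illustrative; 'polar' assumed to be a registered projection):
+    ln3=vcs.createline('example3',ltype='solid',projection='polar')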
+
+:param name: Name of created object
+:type name: str
+
+:param source: a line, or string name of a line
+:type source: str
+
+:param ltype: One of "dash", "dash-dot", "solid", "dot", or "long-dash".
+:type ltype: str
+
+:param width: Thickness of the line to be created
+:type width: int
+
+:param color: A color name from the `X11 Color Names list <https://en.wikipedia.org/wiki/X11_color_names>`_,
+              or an integer value from 0-255, or an RGB/RGBA tuple/list (e.g. (0,100,0), (100,100,0,50))
+:type color: str or int
+
+:param priority: The layer on which the line will be drawn.
+:type priority: int
+
+:param viewport: 4 floats between 0 and 1. These specify the area that the X/Y values are mapped to inside of the canvas
+:type viewport: list of floats
+
+:param worldcoordinate: List of 4 floats (xmin, xmax, ymin, ymax)
+:type worldcoordinate: list of floats
+
+:param x: List of lists of x coordinates. Values must be between worldcoordinate[0] and worldcoordinate[1].
+:type x: list of floats
+
+:param y: List of lists of y coordinates. Values must be between worldcoordinate[2] and worldcoordinate[3].
+:type y: list of floats
+
+:param projection: Specify a geographic projection used to convert x/y from spherical coordinates into 2D coordinates.
+:type projection: str or projection object
+
+:returns: A VCS line secondary method object
+:rtype: vcs.line.Tl
 """
 
     name, source = check_name_source(name, source, 'line')
@@ -1111,38 +1288,72 @@ def getline(name='default', ltype=None, width=None, color=None,
             worldcoordinate=None,
             x=None, y=None):
     """
-Function: getline        # Construct a new line secondary method
+    VCS contains a list of secondary methods. This function will create a
+    line class object from an existing VCS line secondary method. If
+    no line name is given, then line 'default' will be used.
 
-Description of Function:
-VCS contains a list of secondary methods. This function will create a
-line class object from an existing VCS line secondary method. If
-no line name is given, then line 'default' will be used.
+    Note, VCS does not allow the modification of `default' attribute sets.
+    However, a `default' attribute set that has been copied under a
+    different name can be modified. (See the createline function.)
 
-Note, VCS does not allow the modification of `default' attribute sets.
-However, a `default' attribute set that has been copied under a
-different name can be modified. (See the createline function.)
+    :Example:
 
-Example of Use:
-a=vcs.init()
-vcs.show('line')                   # Show all the existing line secondary methods
-ln=vcs.getline()                   # ln instance of 'default' line secondary
-                                 #       method
-ln2=vcs.getline('quick')           # ln2 instance of existing 'quick' line
-                                 #       secondary method
-ln3=vcs.getline(name='red', ltype='dash', width=2,
-              color=242, priority=1, viewport=[0, 2.0, 0, 2.0],
-              worldcoordinate=[0,100, 0,50]
-              x=[0,20,40,60,80,100],
-              y=[0,10,20,30,40,50] )      # Create instance of line object 'red'
-vcs.line(ln3)                      # Plot using specified line object
-"""
+::
+
+    a=vcs.init()
+    # Show all the existing line secondary methods
+    vcs.show('line')
+    # ln instance of 'default' line secondary method
+    ln=vcs.getline()
+    # ln2 instance of existing 'quick' line secondary method
+    ln2=vcs.getline('quick')
+    # Create instance of line object 'red'
+    ln3=vcs.getline(name='red', ltype='dash', width=2,
+                  color=242, priority=1, viewport=[0, 1.0, 0, 1.0],
+                  worldcoordinate=[0,100, 0,50],
+                  x=[0,20,40,60,80,100],
+                  y=[0,10,20,30,40,50] )
+    # Plot using specified line object
+    vcs.line(ln3)
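+    # Keyword arguments update the retrieved line in place, provided it
+    # is not 'default' (illustrative):
+    ln4=vcs.getline('quick',ltype='dot',width=4)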
+
+:param name: String name of an existing line VCS object
+:type name: str
+
+:param ltype: One of "dash", "dash-dot", "solid", "dot", or "long-dash".
+:type ltype: str
+
+:param width: Thickness of the line to be created
+:type width: int
+
+:param color: A color name from the `X11 Color Names list <https://en.wikipedia.org/wiki/X11_color_names>`_,
+              or an integer value from 0-255, or an RGB/RGBA tuple/list (e.g. (0,100,0), (100,100,0,50))
+:type color: str or int
+
+:param priority: The layer on which the line will be drawn.
+:type priority: int
+
+:param viewport: 4 floats between 0 and 1. These specify the area that the X/Y values are mapped to inside of the canvas
+:type viewport: list of floats
+
+:param worldcoordinate: List of 4 floats (xmin, xmax, ymin, ymax)
+:type worldcoordinate: list of floats
+
+:param x: List of lists of x coordinates. Values must be between worldcoordinate[0] and worldcoordinate[1].
+:type x: list of floats
+
+:param y: List of lists of y coordinates. Values must be between worldcoordinate[2] and worldcoordinate[3].
+:type y: list of floats
+
+:returns: A VCS line object
+:rtype: vcs.line.Tl
+    """
 
     # Check to make sure the argument passed in is a STRING
     if not isinstance(name, str):
         raise vcsError('The argument must be a string.')
 
     if name not in vcs.elements["line"]:
-        raise ValueError("The line '%s' does not exists" % name)
+        raise ValueError("The line '%s' does not exist" % name)
     ln = vcs.elements["line"][name]
     if ltype is not None and ln.name != 'default':
         ln.type = ltype
@@ -1170,32 +1381,70 @@ def createmarker(name=None, source='default', mtype=None,
                  viewport=None, worldcoordinate=None,
                  x=None, y=None, projection=None):
     """
-Function: createmarker                   # Construct a new marker secondary method
+    Create a new marker secondary method given the name and an existing
+    marker secondary method to copy attributes from. If no existing marker
+    secondary method name is given, then the default marker secondary
+    method will be used as the source of the copied attributes.
 
-Description of Function:
-Create a new marker secondary method given the the name and the existing
-marker secondary method to copy the attributes from. If no existing marker
-secondary method name is given, then the default marker secondary method
-will be used as the secondary method to which the attributes will be
-copied from.
+    If the name provided already exists, then an error will be returned.
+    Secondary method names must be unique.
 
-If the name provided already exists, then a error will be returned.
-Secondary method names must be unique.
+    :Example:
 
-Example of Use:
-a=vcs.init()
-vcs.show('marker')
-mrk=vcs.createmarker('example1',)
-vcs.show('marker')
-mrk=vcs.createmarker('example2','black')
-vcs.show('boxfill')
-mrk2=vcs.createmarker(name='new', name_src='red',mtype='dash', size=2,
-              color=242, priority=1, viewport=[0, 2.0, 0, 2.0],
-              worldcoordinate=[0,100, 0,50]
-              x=[0,20,40,60,80,100],
-              y=[0,10,20,30,40,50] )      # Create instance of marker object 'red'
-vcs.marker(mrk2)                      # Plot using specified marker object
-"""
+::
+
+    a=vcs.init()
+    vcs.show('marker')
+    mrk=vcs.createmarker('example1')
+    vcs.show('marker')
+    mrk=vcs.createmarker('example2','black')
+    vcs.show('boxfill')
+    # Create instance of marker object 'red'
+    mrk2=vcs.createmarker(name='new', source='red', mtype='dot', size=2,
+                  color=242, priority=1, viewport=[0, 1.0, 0, 1.0],
+                  worldcoordinate=[0,100, 0,50],
+                  x=[0,20,40,60,80,100],
+                  y=[0,10,20,30,40,50] )
+    # Plot using specified marker object
+    vcs.marker(mrk2)
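+    # Per the color parameter below, RGB/RGBA tuples are also accepted
+    # (illustrative):
+    mrk3=vcs.createmarker('example3',color=(0,100,0,50))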
+
+
+:param name: Name of created object
+:type name: str
+
+:param source: A marker, or string name of a marker
+:type source: str
+
+:param mtype: Specifies the type of marker, i.e. "dot", "circle"
+:type mtype: str
+
+:param size: Size of the marker
+:type size: int
+
+:param color: A color name from the `X11 Color Names list <https://en.wikipedia.org/wiki/X11_color_names>`_,
+              or an integer value from 0-255, or an RGB/RGBA tuple/list (e.g. (0,100,0), (100,100,0,50))
+:type color: str or int
+
+:param priority: The layer on which the marker will be drawn.
+:type priority: int
+
+:param viewport: 4 floats between 0 and 1. These specify the area that the X/Y values are mapped to inside of the canvas
+:type viewport: list of floats
+
+:param worldcoordinate: List of 4 floats (xmin, xmax, ymin, ymax)
+:type worldcoordinate: list of floats
+
+:param x: List of lists of x coordinates. Values must be between worldcoordinate[0] and worldcoordinate[1].
+:type x: list of floats
+
+:param y: List of lists of y coordinates. Values must be between worldcoordinate[2] and worldcoordinate[3].
+:type y: list of floats
+
+:returns: A secondary marker method
+:rtype: vcs.marker.Tm
+
+    """
 
     name, source = check_name_source(name, source, 'marker')
 
@@ -1226,31 +1475,69 @@ def getmarker(name='default', mtype=None, size=None, color=None,
               worldcoordinate=None,
               x=None, y=None):
     """
-Function: getmarker                      # Construct a new marker secondary method
+    VCS contains a list of secondary methods. This function will create a
+    marker class object from an existing VCS marker secondary method. If
+    no marker name is given, then marker 'default' will be used.
 
-Description of Function:
-VCS contains a list of secondary methods. This function will create a
-marker class object from an existing VCS marker secondary method. If
-no marker name is given, then marker 'default' will be used.
+    Note, VCS does not allow the modification of `default' attribute sets.
+    However, a `default' attribute set that has been copied under a
+    different name can be modified. (See the createmarker function.)
 
-Note, VCS does not allow the modification of `default' attribute sets.
-However, a `default' attribute set that has been copied under a
-different name can be modified. (See the createmarker function.)
+    :Example:
 
-Example of Use:
-a=vcs.init()
-vcs.show('marker')                    # Show all the existing marker secondary methods
-mrk=vcs.getmarker()                   # mrk instance of 'default' marker secondary
-                                    #       method
-mrk2=vcs.getmarker('quick')           # mrk2 instance of existing 'quick' marker
-                                    #       secondary method
-mrk3=vcs.getmarker(name='red', mtype='dash', size=2,
-              color=242, priority=1, viewport=[0, 2.0, 0, 2.0],
-              worldcoordinate=[0,100, 0,50]
-              x=[0,20,40,60,80,100],
-              y=[0,10,20,30,40,50] )      # Create instance of marker object 'red'
-vcs.marker(mrk3)                      # Plot using specified marker object
-"""
+::
+
+    a=vcs.init()
+    # Show all the existing marker secondary methods
+    vcs.show('marker')
+    # mrk instance of 'default' marker secondary method
+    mrk=vcs.getmarker()
+    # mrk2 instance of existing 'quick' marker secondary method
+    mrk2=vcs.getmarker('quick')
+    # Create instance of marker object 'red'
+    mrk3=vcs.getmarker(name='red', mtype='dot', size=2,
+                  color=242, priority=1, viewport=[0, 1.0, 0, 1.0],
+                  worldcoordinate=[0,100, 0,50],
+                  x=[0,20,40,60,80,100],
+                  y=[0,10,20,30,40,50] )
+    # Plot using specified marker object
+    vcs.marker(mrk3)
+
+:param name: String name of an existing marker VCS object
+:type name: str
+
+:param mtype: Specifies the type of marker, i.e. "dot", "circle"
+:type mtype: str
+
+:param size: Size of the marker
+:type size: int
+
+:param color: A color name from the `X11 Color Names list <https://en.wikipedia.org/wiki/X11_color_names>`_,
+              or an integer value from 0-255, or an RGB/RGBA tuple/list (e.g. (0,100,0), (100,100,0,50))
+:type color: str or int
+
+:param priority: The layer on which the marker will be drawn.
+:type priority: int
+
+:param viewport: 4 floats between 0 and 1. These specify the area that the X/Y values are mapped to inside of the canvas
+:type viewport: list of floats
+
+:param worldcoordinate: List of 4 floats (xmin, xmax, ymin, ymax)
+:type worldcoordinate: list of floats
+
+:param x: List of lists of x coordinates. Values must be between worldcoordinate[0] and worldcoordinate[1].
+:type x: list of floats
+
+:param y: List of lists of y coordinates. Values must be between worldcoordinate[2] and worldcoordinate[3].
+:type y: list of floats
+
+:returns: A marker graphics method object
+:rtype: vcs.marker.Tm
+
+    """
 
     # Check to make sure the argument passed in is a STRING
     if not isinstance(name, str):
@@ -1283,30 +1570,67 @@ def createfillarea(name=None, source='default', style=None,
                    viewport=None, worldcoordinate=None,
                    x=None, y=None):
     """
-Function: createfillarea     # Construct a new fillarea secondary method
+    Create a new fillarea secondary method given the name and an existing
+    fillarea secondary method to copy attributes from. If no existing fillarea
+    secondary method name is given, then the default fillarea secondary
+    method will be used as the source of the copied attributes.
 
-Description of Function:
-Create a new fillarea secondary method given the the name and the existing
-fillarea secondary method to copy the attributes from. If no existing fillarea
-secondary method name is given, then the default fillarea secondary method
-will be used as the secondary method to which the attributes will be
-copied from.
-
-If the name provided already exists, then a error will be returned.
-Secondary method names must be unique.
-
-Example of Use:
-vcs.show('fillarea')
-fa=vcs.createfillarea('example1',)
-vcs.show('fillarea')
-fa=vcs.createfillarea('example2','black')
-vcs.show('fillarea')
-fa2=vcs.createmarker(name='new', name_src='red',style=1, index=1,
-              color=242, priority=1, viewport=[0, 2.0, 0, 2.0],
-              worldcoordinate=[0,100, 0,50]
-              x=[0,20,40,60,80,100],
-              y=[0,10,20,30,40,50] )      # Create instance of fill area object 'red'
-vcs.fillarea(fa2)                      # Plot using specified fill area object
+    If the name provided already exists, then an error will be returned.
+    Secondary method names must be unique.
+
+    :Example:
+
+::
+
+    vcs.show('fillarea')
+    fa=vcs.createfillarea('example1')
+    vcs.show('fillarea')
+    fa=vcs.createfillarea('example2','black')
+    vcs.show('fillarea')
+    # Create instance of fill area object 'red'
+    fa2=vcs.createfillarea(name='new', source='red', style='hatch', index=1,
+                  color=242, priority=1, viewport=[0, 1.0, 0, 1.0],
+                  worldcoordinate=[0,100, 0,50],
+                  x=[0,20,40,60,80,100],
+                  y=[0,10,20,30,40,50] )
+    # Plot using specified fill area object
+    vcs.fillarea(fa2)
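+    # style and index together select hatching, per the parameters below
+    # (illustrative values):
+    fa3=vcs.createfillarea('example3',style='hatch',index=3)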
+
+:param name: Name of created object
+:type name: str
+
+:param source: a fillarea, or string name of a fillarea
+:type source: str
+
+:param style: One of "hatch", "solid", or "pattern".
+:type style: str
+
+:param index: Specifies which `pattern <http://uvcdat.llnl.gov/gallery/fullsize/pattern_chart.png>`_ to fill with.
+              Accepts ints from 1-20.
+:type index: int
+
+:param color: A color name from the `X11 Color Names list <https://en.wikipedia.org/wiki/X11_color_names>`_,
+              or an integer value from 0-255, or an RGB/RGBA tuple/list (e.g. (0,100,0), (100,100,0,50))
+:type color: str or int
+
+:param priority: The layer on which the fillarea will be drawn.
+:type priority: int
+
+:param viewport: 4 floats between 0 and 1. These specify the area that the X/Y values are mapped to inside of the canvas
+:type viewport: list of floats
+
+:param worldcoordinate: List of 4 floats (xmin, xmax, ymin, ymax)
+:type worldcoordinate: list of floats
+
+:param x: List of lists of x coordinates. Values must be between worldcoordinate[0] and worldcoordinate[1].
+:type x: list of floats
+
+:param y: List of lists of y coordinates. Values must be between worldcoordinate[2] and worldcoordinate[3].
+:type y: list of floats
+
+:returns: A fillarea object
+:rtype: vcs.fillarea.Tf
 """
 
     name, source = check_name_source(name, source, 'fillarea')
@@ -1337,36 +1661,72 @@ def getfillarea(name='default', style=None,
                 worldcoordinate=None,
                 x=None, y=None):
     """
-Function: getfillarea              # Construct a new fillarea secondary method
+    VCS contains a list of secondary methods. This function will create a
+    fillarea class object from an existing VCS fillarea secondary method. If
+    no fillarea name is given, then fillarea 'default' will be used.
 
-Description of Function:
-VCS contains a list of secondary methods. This function will create a
-fillarea class object from an existing VCS fillarea secondary method. If
-no fillarea name is given, then fillarea 'default' will be used.
-
-Note, VCS does not allow the modification of `default' attribute sets.
-However, a `default' attribute set that has been copied under a
-different name can be modified. (See the createfillarea function.)
-
-Example of Use:
-vcs.show('fillarea')                 # Show all the existing fillarea secondary methods
-fa=vcs.getfillarea()                 # fa instance of 'default' fillarea secondary
-                                   #       method
-fa2=vcs.getfillarea('quick')         # fa2 instance of existing 'quick' fillarea
-                                    #       secondary method
-fa3=vcs.createmarker(name='new', name='red',style=1, index=1,
-              color=242, priority=1, viewport=[0, 2.0, 0, 2.0],
-              worldcoordinate=[0,100, 0,50]
-              x=[0,20,40,60,80,100],
-              y=[0,10,20,30,40,50] )      # Create instance of fill area object 'red'
-vcs.fillarea(fa3)                      # Plot using specified fill area object
-"""
+    Note, VCS does not allow the modification of `default' attribute sets.
+    However, a `default' attribute set that has been copied under a
+    different name can be modified. (See the createfillarea function.)
+
+    :Example:
+
+::
+
+    # Show all the existing fillarea secondary methods
+    vcs.show('fillarea')
+    # fa instance of 'default' fillarea secondary method
+    fa=vcs.getfillarea()
+    # fa2 instance of existing 'quick' fillarea secondary method
+    fa2=vcs.getfillarea('quick')
+    # Create instance of fill area object 'red'
+    fa3=vcs.getfillarea(name='red', style='hatch', index=1,
+                  color=242, priority=1, viewport=[0, 1.0, 0, 1.0],
+                  worldcoordinate=[0,100, 0,50],
+                  x=[0,20,40,60,80,100],
+                  y=[0,10,20,30,40,50] )
+    # Plot using specified fill area object
+    vcs.fillarea(fa3)
+
+:param name: String name of an existing fillarea VCS object
+:type name: str
+
+:param style: One of "hatch", "solid", or "pattern".
+:type style: str
+
+:param index: Specifies which `pattern <http://uvcdat.llnl.gov/gallery/fullsize/pattern_chart.png>`_ to fill with.
+              Accepts ints from 1-20.
+:type index: int
+
+:param color: A color name from the `X11 Color Names list <https://en.wikipedia.org/wiki/X11_color_names>`_,
+              or an integer value from 0-255, or an RGB/RGBA tuple/list (e.g. (0,100,0), (100,100,0,50))
+:type color: str or int
+
+:param priority: The layer on which the fillarea will be drawn.
+:type priority: int
+
+:param viewport: 4 floats between 0 and 1. These specify the area that the X/Y values are mapped to inside of the canvas
+:type viewport: list of floats
+
+:param worldcoordinate: List of 4 floats (xmin, xmax, ymin, ymax)
+:type worldcoordinate: list of floats
+
+:param x: List of lists of x coordinates. Values must be between worldcoordinate[0] and worldcoordinate[1].
+:type x: list of floats
+
+:param y: List of lists of y coordinates. Values must be between worldcoordinate[2] and worldcoordinate[3].
+:type y: list of floats
+
+:returns: A fillarea secondary object
+:rtype: vcs.fillarea.Tf
+
+    """
 
     # Check to make sure the argument passed in is a STRING
     if not isinstance(name, str):
         raise vcsError('The argument must be a string.')
     if name not in vcs.elements["fillarea"].keys():
-        raise vcsError("Fillarea '%s' doe not exists" % (name))
+        raise vcsError("Fillarea '%s' does not exist" % (name))
 
     fa = vcs.elements["fillarea"][name]
     if (style is not None) and (fa.name != "default"):
@@ -1393,31 +1753,69 @@ def createtexttable(name=None, source='default', font=None,
                     viewport=None, worldcoordinate=None,
                     x=None, y=None):
     """
-Function: createtexttable            # Construct a new texttable secondary method
+    Create a new texttable secondary method given the name and the existing
+    texttable secondary method to copy the attributes from. If no existing texttable
+    secondary method name is given, then the default texttable secondary method
+    will be used as the secondary method from which the attributes will be
+    copied.
 
-Description of Function:
-Create a new texttable secondary method given the the name and the existing
-texttable secondary method to copy the attributes from. If no existing texttable
-secondary method name is given, then the default texttable secondary method
-will be used as the secondary method to which the attributes will be
-copied from.
+    If the name provided already exists, then an error will be returned.
+    Secondary method names must be unique.
 
-If the name provided already exists, then a error will be returned.
-Secondary method names must be unique.
+    :Example:
+
+::
+
+    a=vcs.init()
+    vcs.show('texttable')
+    tt=vcs.createtexttable('example1',)
+    vcs.show('texttable')
+    tt=vcs.createtexttable('example2','black')
+    vcs.show('texttable')
+    # Create instance of texttable object 'new'
+    tt=vcs.createtexttable(name='new', source='red', font=1, spacing=1, expansion=1,
+                  color=242, priority=1, viewport=[0, 2.0, 0, 2.0],
+                  worldcoordinate=[0,100, 0,50],
+                  x=[0,20,40,60,80,100],
+                  y=[0,10,20,30,40,50] )
+    # Plot using specified texttable object
+    vcs.texttable(tt)
+
+
+:param name: Name of created object
+:type name: str
+
+:param source: a texttable, or string name of a texttable
+:type source: str
+
+:param font: Which font to use for the text (font index or name).
+:type font: int or str
+
+:param expansion: Sets the expansion attribute of the texttable.
+:type expansion: int
+
+:param color: A color name from the `X11 Color Names list <https://en.wikipedia.org/wiki/X11_color_names>`_,
+              or an integer value from 0-255, or an RGB/RGBA tuple/list (e.g. (0,100,0), (100,100,0,50))
+:type color: str or int or list/tuple
+
+:param priority: The layer on which the texttable will be drawn.
+:type priority: int
+
+:param viewport: 4 floats between 0 and 1. These specify the area that the X/Y values are mapped to inside of the canvas
+:type viewport: list of floats
+
+:param worldcoordinate: List of 4 floats (xmin, xmax, ymin, ymax)
+:type worldcoordinate: list of floats
+
+:param x: List of lists of x coordinates. Values must be between worldcoordinate[0] and worldcoordinate[1].
+:type x: list of floats
+
+:param y: List of lists of y coordinates. Values must be between worldcoordinate[2] and worldcoordinate[3].
+:type y: list of floats
+
+:returns: A texttable secondary method object
+:rtype: vcs.texttable.Tt
 
-Example of Use:
-a=vcs.init()
-vcs.show('texttable')
-tt=vcs.createtexttable('example1',)
-vcs.show('texttable')
-tt=vcs.createtexttable('example2','black')
-vcs.show('texttable')
-tt=vcs.createtexttable(name='new',name_src='red',font=1,spacing=1,expansion=1,
-              color=242, priority=1, viewport=[0, 2.0, 0, 2.0],
-              worldcoordinate=[0,100, 0,50]
-              x=[0,20,40,60,80,100],
-              y=[0,10,20,30,40,50] )      # Create instance of texttable object 'new'
-vcs.texttable(tt)                      # Plot using specified texttable object
 """
 
     name, source = check_name_source(name, source, 'texttable')
@@ -1453,31 +1851,66 @@ def gettexttable(name='default', font=None,
                  worldcoordinate=None,
                  x=None, y=None):
     """
-Function: gettexttable           # Construct a new texttable secondary method
+    VCS contains a list of secondary methods. This function will create a
+    texttable class object from an existing VCS texttable secondary method. If
+    no texttable name is given, then texttable 'default' will be used.
 
-Description of Function:
-VCS contains a list of secondary methods. This function will create a
-texttable class object from an existing VCS texttable secondary method. If
-no texttable name is given, then texttable 'default' will be used.
+    Note, VCS does not allow the modification of `default' attribute sets.
+    However, a `default' attribute set that has been copied under a
+    different name can be modified. (See the createtexttable function.)
 
-Note, VCS does not allow the modification of `default' attribute sets.
-However, a `default' attribute set that has been copied under a
-different name can be modified. (See the createtexttable function.)
+    :Example:
 
-Example of Use:
-a=vcs.init()
-vcs.show('texttable')              # Show all the existing texttable secondary methods
-tt=vcs.gettexttable()              # tt instance of 'default' texttable secondary
-                                 #       method
-tt2=vcs.gettexttable('quick')      # tt2 instance of existing 'quick' texttable
-                                 #       secondary method
-tt3=vcs.gettexttable(name='red', font=1, spacing=1,expansion=1,
-              color=242, priority=1, viewport=[0, 2.0, 0, 2.0],
-              worldcoordinate=[0,100, 0,50]
-              x=[0,20,40,60,80,100],
-              y=[0,10,20,30,40,50] )      # Create instance of texttable object 'red'
-vcs.texttable(tt3)                      # Plot using specified texttable object
-"""
+::
+
+    a=vcs.init()
+    # Show all the existing texttable secondary methods
+    vcs.show('texttable')
+    # tt instance of 'default' texttable secondary method
+    tt=vcs.gettexttable()
+    # tt2 instance of existing 'quick' texttable secondary method
+    tt2=vcs.gettexttable('quick')
+    # Create instance of texttable object 'red'
+    tt3=vcs.gettexttable(name='red', font=1, spacing=1, expansion=1,
+                  color=242, priority=1, viewport=[0, 2.0, 0, 2.0],
+                  worldcoordinate=[0,100, 0,50],
+                  x=[0,20,40,60,80,100],
+                  y=[0,10,20,30,40,50] )
+    # Plot using specified texttable object
+    vcs.texttable(tt3)
+
+:param name: String name of an existing VCS texttable object
+:type name: str
+
+:param font: Which font to use for the text (font index or name).
+:type font: int or str
+
+:param expansion: Sets the expansion attribute of the texttable.
+:type expansion: int
+
+:param color: A color name from the `X11 Color Names list <https://en.wikipedia.org/wiki/X11_color_names>`_,
+              or an integer value from 0-255, or an RGB/RGBA tuple/list (e.g. (0,100,0), (100,100,0,50))
+:type color: str or int or list/tuple
+
+:param priority: The layer on which the texttable will be drawn.
+:type priority: int
+
+:param viewport: 4 floats between 0 and 1. These specify the area that the X/Y values are mapped to inside of the canvas
+:type viewport: list of floats
+
+:param worldcoordinate: List of 4 floats (xmin, xmax, ymin, ymax)
+:type worldcoordinate: list of floats
+
+:param x: List of lists of x coordinates. Values must be between worldcoordinate[0] and worldcoordinate[1].
+:type x: list of floats
+
+:param y: List of lists of y coordinates. Values must be between worldcoordinate[2] and worldcoordinate[3].
+:type y: list of floats
+
+:returns: A texttable secondary method object
+:rtype: vcs.texttable.Tt
+
+    """
 
     # Check to make sure the argument passed in is a STRING
     if not isinstance(name, str):
@@ -1490,24 +1923,34 @@ vcs.texttable(tt3)                      # Plot using specified texttable object
 
 def createtextorientation(name=None, source='default'):
     """
-Function: createtextorientation   # Construct a new textorientation secondary method
+    Create a new textorientation secondary method given the name and
+    the existing textorientation secondary method to copy the attributes
+    from. If no existing textorientation secondary method name is given,
+    then the default textorientation secondary method will be used as the
+    secondary method from which the attributes will be copied.
 
-Description of Function:
-Create a new textorientation secondary method given the the name and
-the existing textorientation secondary method to copy the attributes
-from. If no existing textorientation secondary method name is given,
-then the default textorientation secondary method will be used as the
-secondary method to which the attributes will be copied from.
-
-If the name provided already exists, then a error will be returned.
-Secondary method names must be unique.
-
-Example of Use:
-vcs.show('textorientation')
-to=vcs.createtextorientation('example1',)
-vcs.show('textorientation')
-to=vcs.createtextorientation('example2','black')
-vcs.show('textorientation')
+    If the name provided already exists, then an error will be returned.
+    Secondary method names must be unique.
+
+    :Example:
+
+::
+
+    vcs.show('textorientation')
+    to=vcs.createtextorientation('example1',)
+    vcs.show('textorientation')
+    to=vcs.createtextorientation('example2','black')
+    vcs.show('textorientation')
+
+
+:param name: The name of the created object
+:type name: str
+
+:param source: The object to inherit from
+:type source: a textorientation or a string name of a textorientation
+
+:returns: A textorientation secondary method
+:rtype: vcs.textorientation.To
 """
 
     name, source = check_name_source(name, source, 'textorientation')
@@ -1517,26 +1960,33 @@ vcs.show('textorientation')
 
 def gettextorientation(To_name_src='default'):
     """
-Function: gettextorientation       # Construct a new textorientation secondary method
+    VCS contains a list of secondary methods. This function will create
+    a textorientation class object from an existing VCS textorientation
+    secondary method. If no textorientation name is given, then
+    textorientation 'default' will be used.
 
-Description of Function:
-VCS contains a list of secondary methods. This function will create
-a textorientation class object from an existing VCS textorientation
-secondary method. If no textorientation name is given, then
-textorientation 'default' will be used.
+    Note, VCS does not allow the modification of `default' attribute sets.
+    However, a `default' attribute set that has been copied under a
+    different name can be modified. (See the createtextorientation function.)
 
-Note, VCS does not allow the modification of `default' attribute sets.
-However, a `default' attribute set that has been copied under a
-different name can be modified. (See the createtextorientation function.)
+    :Example:
 
-Example of Use:
-a=vcs.init()
-vcs.show('textorientation')    # Show all the existing textorientation secondary methods
-to=vcs.gettextorientation()    # to instance of 'default' textorientation secondary
-                             #       method
-to2=vcs.gettextorientation('quick')  # to2 instance of existing 'quick' textorientation
-                                   #       secondary method
-"""
+::
+
+    a=vcs.init()
+    # Show all the existing textorientation secondary methods
+    vcs.show('textorientation')
+    # to instance of 'default' textorientation secondary method
+    to=vcs.gettextorientation()
+    # to2 instance of existing 'quick' textorientation secondary method
+    to2=vcs.gettextorientation('quick')
+
+:param To_name_src: String name of an existing textorientation VCS object
+:type To_name_src: str
+
+:returns: A textorientation VCS object
+:rtype: vcs.textorientation.To
+    """
 
     # Check to make sure the argument passed in is a STRING
     if not isinstance(To_name_src, str):
@@ -1554,25 +2004,91 @@ def createtextcombined(Tt_name=None, Tt_source='default', To_name=None, To_sourc
                        priority=None, viewport=None, worldcoordinate=None, x=None, y=None,
                        height=None, angle=None, path=None, halign=None, valign=None, projection=None):
     """
-Function: createtext or createtextcombined  # Construct a new text combined secondary method
+    Create a new textcombined secondary method given the names and
+    the existing texttable and textorientation secondary methods to copy
+    the attributes from. If no existing texttable and textorientation
+    secondary method names are given, then the default texttable and
+    textorientation secondary methods will be used as the secondary methods
+    from which the attributes will be copied.
+
+    If the name provided already exists, then an error will be returned.
+    Secondary method names must be unique.
+
+    :Example:
+
+::
+
+    vcs.show('texttable')
+    vcs.show('textorientation')
+    tc=vcs.createtextcombined('example1','std','example1','7left')
+    vcs.show('texttable')
+    vcs.show('textorientation')
+
+:param Tt_name: Name of the created texttable
+:type Tt_name: str
+
+:param Tt_source: Name of the texttable object to copy the attributes from
+:type Tt_source: str
+
+:param To_name: Name of the created textorientation
+:type To_name: str
+
+:param To_source: Name of the textorientation object to copy the attributes from
+:type To_source: str
+
+:param font: Which font to use for the text (font index or name).
+:type font: int or str
+
+:param spacing: Sets the spacing attribute of the text.
+:type spacing: int
+
+:param expansion: Sets the expansion attribute of the text.
+:type expansion: int
+
+:param color: A color name from the `X11 Color Names list <https://en.wikipedia.org/wiki/X11_color_names>`_,
+              or an integer value from 0-255, or an RGB/RGBA tuple/list (e.g. (0,100,0), (100,100,0,50))
+:type color: str or int or list/tuple
+
+:param priority: The layer on which the object will be drawn.
+:type priority: int
+
+:param viewport: 4 floats between 0 and 1. These specify the area that the X/Y values are mapped to inside of the canvas
+:type viewport: list of floats
+
+:param worldcoordinate: List of 4 floats (xmin, xmax, ymin, ymax)
+:type worldcoordinate: list of floats
+
+:param x: List of lists of x coordinates. Values must be between worldcoordinate[0] and worldcoordinate[1].
+:type x: list of floats
+
+:param y: List of lists of y coordinates. Values must be between worldcoordinate[2] and worldcoordinate[3].
+:type y: list of floats
+
+:param height: Size of the text.
+:type height: int
+
+:param angle: Rotation angle of the text, in degrees.
+:type angle: int
+
+:param path: Direction in which the text is drawn ('right', 'left', 'up', 'down').
+:type path: str
+
+:param halign: Horizontal alignment of the text ('left', 'center', 'right').
+:type halign: str
+
+:param valign: Vertical alignment of the text ('top', 'cap', 'half', 'base', 'bottom').
+:type valign: str
+
+:param projection: Specify a geographic projection used to convert x/y from spherical coordinates into 2D coordinates.
+:type projection: str or projection object
+
+:returns: A VCS text object
+:rtype: vcs.textcombined.Tc
 
-Description of Function:
-Create a new textcombined secondary method given the the names and
-the existing texttable and textorientation secondary methods to copy
-the attributes from. If no existing texttable and textorientation
-secondary method names are given, then the default texttable and
-textorientation secondary methods will be used as the secondary method
-to which the attributes will be copied from.
-
-If the name provided already exists, then a error will be returned.
-Secondary method names must be unique.
-
-Example of Use:
-vcs.show('texttable')
-vcs.show('textorientation')
-tc=vcs.createtextcombined('example1','std','example1','7left')
-vcs.show('texttable')
-vcs.show('textorientation')
 """
     # Check if to is defined
     if To_name is None:
@@ -1623,28 +2139,90 @@ def gettextcombined(Tt_name_src='default', To_name_src=None, string=None, font=N
                     priority=None, viewport=None, worldcoordinate=None, x=None, y=None,
                     height=None, angle=None, path=None, halign=None, valign=None):
     """
-Function: gettext or gettextcombined   # Construct a new textcombined secondary method
+    VCS contains a list of secondary methods. This function will create
+    a textcombined class object from an existing VCS texttable secondary
+    method and an existing VCS textorientation secondary method. If no
+    texttable or textorientation names are given, then the 'default' names
+    will be used in both cases.
 
-Description of Function:
-VCS contains a list of secondary methods. This function will create
-a textcombined class object from an existing VCS texttable secondary
-method and an existing VCS textorientation secondary method. If no
-texttable or textorientation names are given, then the 'default' names
-will be used in both cases.
+    Note, VCS does not allow the modification of `default' attribute sets.
+    However, a `default' attribute set that has been copied under a
+    different name can be modified. (See the createtextcombined function.)
 
-Note, VCS does not allow the modification of `default' attribute sets.
-However, a `default' attribute set that has been copied under a
-different name can be modified. (See the createtextcombined function.)
+    :Example:
 
-Example of Use:
-a=vcs.init()
-vcs.show('texttable')                  # Show all the existing texttable secondary methods
-vcs.show('textorientation')            # Show all the existing textorientation secondary methods
-tc=vcs.gettextcombined()               # Use 'default' for texttable and textorientation
-tc2=vcs.gettextcombined('std','7left') # Use 'std' texttable and '7left' textorientation
-if istextcombined(tc):               # Check to see if tc is a textcombined
-   tc.list()                         # Print out all its attriubtes
-"""
+::
+
+    a=vcs.init()
+    # Show all the existing texttable secondary methods
+    vcs.show('texttable')
+    # Show all the existing textorientation secondary methods
+    vcs.show('textorientation')
+    # Use 'default' for texttable and textorientation
+    tc=vcs.gettextcombined()
+    # Use 'std' texttable and '7left' textorientation
+    tc2=vcs.gettextcombined('std','7left')
+    # Check to see if tc is a textcombined
+    if vcs.istextcombined(tc):
+       # Print out all its attributes
+       tc.list()
+
+
+:param Tt_name_src: String name of an existing texttable
+:type Tt_name_src: str
+
+:param To_name_src: String name of an existing textorientation
+:type To_name_src: str
+
+:param string: Text string for the textcombined object to display
+:type string: str
+
+:param font: Which font to use for the text (font index or name).
+:type font: int or str
+
+:param spacing: Sets the spacing attribute of the text.
+:type spacing: int
+
+:param expansion: Sets the expansion attribute of the text.
+:type expansion: int
+
+:param color: A color name from the `X11 Color Names list <https://en.wikipedia.org/wiki/X11_color_names>`_,
+              or an integer value from 0-255, or an RGB/RGBA tuple/list (e.g. (0,100,0), (100,100,0,50))
+:type color: str or int or list/tuple
+
+:param priority: The layer on which the object will be drawn.
+:type priority: int
+
+:param viewport: 4 floats between 0 and 1. These specify the area that the X/Y values are mapped to inside of the canvas
+:type viewport: list of floats
+
+:param worldcoordinate: List of 4 floats (xmin, xmax, ymin, ymax)
+:type worldcoordinate: list of floats
+
+:param x: List of lists of x coordinates. Values must be between worldcoordinate[0] and worldcoordinate[1].
+:type x: list of floats
+
+:param y: List of lists of y coordinates. Values must be between worldcoordinate[2] and worldcoordinate[3].
+:type y: list of floats
+
+:param height: Size of the text.
+:type height: int
+
+:param angle: Rotation angle of the text, in degrees.
+:type angle: int
+
+:param path: Direction in which the text is drawn ('right', 'left', 'up', 'down').
+:type path: str
+
+:param halign: Horizontal alignment of the text ('left', 'center', 'right').
+:type halign: str
+
+:param valign: Vertical alignment of the text ('top', 'cap', 'half', 'base', 'bottom').
+:type valign: str
+
+:returns: A textcombined object
+:rtype: vcs.textcombined.Tc
+    """
 
     # Check to make sure the arguments passed in are a STRINGS
     if not isinstance(Tt_name_src, str):
@@ -1703,21 +2281,28 @@ gettext = gettextcombined
 
 def get3d_scalar(Gfdv3d_name_src='default'):
     """
-Function: get3d_scalar                        # Construct a new 3Dscalar graphics method
+    VCS contains a list of graphics methods. This function will create a
+    dv3d class object from an existing VCS dv3d graphics method. If
+    no dv3d name is given, then dv3d 'default' will be used.
 
-Description of Function:
-VCS contains a list of graphics methods. This function will create a
-dv3d class object from an existing VCS dv3d graphics method. If
-no dv3d name is given, then dv3d 'default' will be used.
-
-Note, VCS does not allow the modification of `default' attribute
-sets. However, a `default' attribute set that has been copied under a
-different name can be modified. (See the create3Dscalar function.)
-
-Example of Use:
-a.show('3d_scalar')                      # Show all the existing 3Dscalar graphics methods
-plot=vcs.get3d_scalar()                  # plot instance of 'default' dv3d graphics
-                                        # method
+    Note, VCS does not allow the modification of `default' attribute
+    sets. However, a `default' attribute set that has been copied under a
+    different name can be modified. (See the create3d_scalar function.)
+
+    :Example:
+
+::
+
+    # Show all the existing 3Dscalar graphics methods
+    a.show('3d_scalar')
+    # plot instance of 'default' dv3d graphics method
+    plot=vcs.get3d_scalar()
+
+:param Gfdv3d_name_src: String name of an existing 3d_scalar VCS object.
+:type Gfdv3d_name_src: str
+
+:returns: A pre-existing 3d_scalar VCS object
+:rtype: vcs.dv3d.Gf3Dscalar
 """
 
     # Check to make sure the argument passed in is a STRING
@@ -1732,47 +2317,60 @@ plot=vcs.get3d_scalar()                  # plot instance of 'default' dv3d graph
 
 def create3d_scalar(name=None, source='default'):
     """
-Function: create3d_scalar                # Construct a new dv3d graphics method
+    Create a new dv3d graphics method given the name and the existing
+    dv3d graphics method to copy the attributes from. If no existing
+    dv3d graphics method name is given, then the default dv3d graphics
+    method will be used as the graphics method from which the attributes
+    will be copied.
 
-Description of Function:
-Create a new dv3d graphics method given the the name and the existing
-dv3d graphics method to copy the attributes from. If no existing
-dv3d graphics method name is given, then the default dv3d graphics
-method will be used as the graphics method to which the attributes will
-be copied from.
+    If the name provided already exists, then an error will be returned. Graphics method names must be unique.
 
-If the name provided already exists, then a error will be returned. Graphics
-method names must be unique.
+    :Example:
 
-Example of Use:
-a=vcs.init()
-a.show('3d_scalar')
-plot=a.create3d_scalar()
+::
+
+    a=vcs.init()
+    a.show('3d_scalar')
+    plot=a.create3d_scalar()
+
+:param name: The name of the created object
+:type name: str
+
+:param source: The object to inherit from
+:type source: a 3d_scalar or a string name of a 3d_scalar
+
+:returns: A 3d_scalar graphics method object
+:rtype: vcs.dv3d.Gf3Dscalar
 """
-# print "~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
-# create3d_scalar
-# ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~"
     name, source = check_name_source(name, source, '3d_scalar')
     return dv3d.Gf3Dscalar(name, source)
 
 
 def get3d_dual_scalar(Gfdv3d_name_src='default'):
     """
-Function: get3d_dual_scalar                        # Construct a new 3DDualScalar graphics method
+    VCS contains a list of graphics methods. This function will create a
+    dv3d class object from an existing VCS dv3d graphics method. If
+    no dv3d name is given, then dv3d 'default' will be used.
+
+    Note, VCS does not allow the modification of `default' attribute
+    sets. However, a `default' attribute set that has been copied under a
+    different name can be modified. (See the create3d_dual_scalar function.)
+
+    :Example:
+
+::
+
+    # Show all the existing 3d_dual_scalar graphics methods
+    a.show('3d_dual_scalar')
+    # plot instance of 'default' dv3d graphics method
+    plot=vcs.get3d_dual_scalar()
+
+:param Gfdv3d_name_src: String name of an existing 3d_dual_scalar VCS object
+:type Gfdv3d_name_src: str
+
+:returns: A pre-existing 3d_dual_scalar VCS object
+:rtype: vcs.dv3d.Gf3DDualScalar
 
-Description of Function:
-VCS contains a list of graphics methods. This function will create a
-dv3d class object from an existing VCS dv3d graphics method. If
-no dv3d name is given, then dv3d 'default' will be used.
-
-Note, VCS does not allow the modification of `default' attribute
-sets. However, a `default' attribute set that has been copied under a
-different name can be modified. (See the create3Dscalar function.)
-
-Example of Use:
-a.show('3d_dual_scalar')                      # Show all the existing 3Dscalar graphics methods
-plot=vcs.get3d_dual_scalar()                  # plot instance of 'default' dv3d graphics
-                                        # method
 """
 
     # Check to make sure the argument passed in is a STRING
@@ -1787,47 +2385,60 @@ plot=vcs.get3d_dual_scalar()                  # plot instance of 'default' dv3d
 
 def create3d_dual_scalar(name=None, source='default'):
     """
-Function: create3d_dual_scalar                # Construct a new dv3d graphics method
+    Create a new dv3d graphics method given the name and the existing
+    dv3d graphics method to copy the attributes from. If no existing
+    dv3d graphics method name is given, then the default dv3d graphics
+    method will be used as the graphics method from which the attributes
+    will be copied.
 
-Description of Function:
-Create a new dv3d graphics method given the the name and the existing
-dv3d graphics method to copy the attributes from. If no existing
-dv3d graphics method name is given, then the default dv3d graphics
-method will be used as the graphics method to which the attributes will
-be copied from.
+    If the name provided already exists, then an error will be returned. Graphics
+    method names must be unique.
 
-If the name provided already exists, then a error will be returned. Graphics
-method names must be unique.
+    :Example:
 
-Example of Use:
-a=vcs.init()
-a.show('3d_dual_scalar')
-plot=a.create3d_dual_scalar()
+::
+
+    a=vcs.init()
+    a.show('3d_dual_scalar')
+    plot=a.create3d_dual_scalar()
+
+:param name: The name of the created object
+:type name: str
+
+:param source: The object to inherit from
+:type source: a 3d_dual_scalar or a string name of a 3d_dual_scalar
+
+:returns: A 3d_dual_scalar graphics method object
+:rtype: vcs.dv3d.Gf3DDualScalar
 """
-# print "~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
-# create3d_scalar
-# ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~"
     name, source = check_name_source(name, source, '3d_dual_scalar')
     return dv3d.Gf3DDualScalar(name, source)
 
 
 def get3d_vector(Gfdv3d_name_src='default'):
     """
-Function: get3d_vector                        # Construct a new 3Dvector graphics method
+    VCS contains a list of graphics methods. This function will create a
+    dv3d class object from an existing VCS dv3d graphics method. If
+    no dv3d name is given, then dv3d 'default' will be used.
 
-Description of Function:
-VCS contains a list of graphics methods. This function will create a
-dv3d class object from an existing VCS dv3d graphics method. If
-no dv3d name is given, then dv3d 'default' will be used.
-
-Note, VCS does not allow the modification of `default' attribute
-sets. However, a `default' attribute set that has been copied under a
-different name can be modified. (See the create3Dvector function.)
-
-Example of Use:
-a.show('3d_vector')                      # Show all the existing 3Dvector graphics methods
-plot=vcs.get3d_vector()                  # plot instance of 'default' dv3d graphics
-                                        # method
+    Note, VCS does not allow the modification of `default' attribute
+    sets. However, a `default' attribute set that has been copied under a
+    different name can be modified. (See the create3d_vector function.)
+
+    :Example:
+
+::
+
+    # Show all the existing 3Dvector graphics methods
+    a.show('3d_vector')
+    # plot instance of 'default' dv3d graphics method
+    plot=vcs.get3d_vector()
+
+:param Gfdv3d_name_src: String name of an existing 3d_vector VCS object
+:type Gfdv3d_name_src: str
+
+:returns: A pre-existing 3d_vector VCS object
+:rtype: vcs.dv3d.Gf3Dvector
 """
 
     # Check to make sure the argument passed in is a STRING
@@ -1842,23 +2453,33 @@ plot=vcs.get3d_vector()                  # plot instance of 'default' dv3d graph
 
 def create3d_vector(name=None, source='default'):
     """
-Function: createdv3d                # Construct a new dv3d graphics method
+    Create a new dv3d graphics method given the name and the existing
+    dv3d graphics method to copy the attributes from. If no existing
+    dv3d graphics method name is given, then the default dv3d graphics
+    method will be used as the graphics method from which the attributes
+    will be copied.
 
-Description of Function:
-Create a new dv3d graphics method given the the name and the existing
-dv3d graphics method to copy the attributes from. If no existing
-dv3d graphics method name is given, then the default dv3d graphics
-method will be used as the graphics method to which the attributes will
-be copied from.
+    If the name provided already exists, then an error will be returned. Graphics
+    method names must be unique.
 
-If the name provided already exists, then a error will be returned. Graphics
-method names must be unique.
+    :Example:
 
-Example of Use:
-a=vcs.init()
-a.show('3Dvector')
-plot=a.create3d_vector()
+::
+
+    a=vcs.init()
+    a.show('3d_vector')
+    plot=a.create3d_vector()
+
+:param name: The name of the created object
+:type name: str
+
+:param source: The object to inherit from
+:type source: a 3d_vector or a string name of a 3d_vector
+
+:returns: A 3d_vector graphics method object
+:rtype: vcs.dv3d.Gf3Dvector
 """
+
     name, source = check_name_source(name, source, '3d_vector')
     return dv3d.Gf3Dvector(name, source)
 
@@ -1871,25 +2492,33 @@ plot=a.create3d_vector()
 
 def createcolormap(Cp_name=None, Cp_name_src='default'):
     """
-Function: createcolormap               # Construct a new colormap secondary method
+    Create a new colormap secondary method given the name and the existing
+    colormap secondary method to copy the attributes from. If no existing colormap
+    secondary method name is given, then the default colormap secondary method
+    will be used as the secondary method from which the attributes will be
+    copied.
 
-Description of Function:
-Create a new colormap secondary method given the the name and the existing
-colormap secondary method to copy the attributes from. If no existing colormap
-secondary method name is given, then the default colormap secondary method
-will be used as the secondary method to which the attributes will be
-copied from.
+    If the name provided already exists, then an error will be returned.
+    Secondary method names must be unique.
 
-If the name provided already exists, then a error will be returned.
-Secondary method names must be unique.
+    :Example:
 
-Example of Use:
-a=vcs.init()
-cp=a.createcolormap('example1',)
-a.show('colormap')
-cp=a.createcolormap('example2','AMIP')
-a.show('colormap')
+::
+
+    a=vcs.init()
+    cp=a.createcolormap('example1',)
+    a.show('colormap')
+    cp=a.createcolormap('example2','AMIP')
+    a.show('colormap')
+
+:param Cp_name: The name of the created object
+:type Cp_name: str
 
+:param Cp_name_src: The object to inherit from
+:type Cp_name_src: a colormap or a string name of a colormap
+
+:returns: A VCS colormap object
+:rtype: vcs.colormap.Cp
 """
     Cp_name, Cp_name_src = check_name_source(Cp_name, Cp_name_src, 'colormap')
     return colormap.Cp(Cp_name, Cp_name_src)
@@ -1897,24 +2526,31 @@ a.show('colormap')
 
 def getcolormap(Cp_name_src='default'):
     """
-Function: getcolormap                      # Construct a new colormap secondary method
+    VCS contains a list of secondary methods. This function will create a
+    colormap class object from an existing VCS colormap secondary method. If
+    no colormap name is given, then colormap 'default' will be used.
 
-Description of Function:
-VCS contains a list of secondary methods. This function will create a
-colormap class object from an existing VCS colormap secondary method. If
-no colormap name is given, then colormap 'default' will be used.
+    Note, VCS does not allow the modification of `default' attribute sets.
+    However, a `default' attribute set that has been copied under a
+    different name can be modified. (See the createcolormap function.)
 
-Note, VCS does not allow the modification of `default' attribute sets.
-However, a `default' attribute set that has been copied under a
-different name can be modified. (See the createcolormap function.)
+    :Example:
 
-Example of Use:
-a=vcs.init()
-a.show('colormap')                      # Show all the existing colormap secondary methods
-cp=a.getcolormap()                      # cp instance of 'default' colormap secondary
-                                        #       method
-cp2=a.getcolormap('quick')              # cp2 instance of existing 'quick' colormap
-                                        #       secondary method
+::
+
+    a=vcs.init()
+    # Show all the existing colormap secondary methods
+    a.show('colormap')
+    # cp instance of 'default' colormap secondary method
+    cp=a.getcolormap()
+    # cp2 instance of existing 'quick' colormap secondary method
+    cp2=a.getcolormap('quick')
+
+:param Cp_name_src: String name of an existing colormap VCS object
+:type Cp_name_src: str
+
+:returns: A pre-existing VCS colormap object
+:rtype: vcs.colormap.Cp
 """
     # Check to make sure the argument passed in is a STRING
     if not isinstance(Cp_name_src, str):
@@ -2043,7 +2679,7 @@ use the "del" function.
 Also note, The user is not allowed to remove a "default" class
 object.
 
-Example of Use:
+:Example:
 a=vcs.init()
 line=a.getline('red')       # To Modify an existing line object
 iso=x.createisoline('dean') # Create an instance of an isoline object
diff --git a/Packages/vcs/vcs/projection.py b/Packages/vcs/vcs/projection.py
index a8476a9892996700b22715648026776390b3efce..cabf461eb5647d8439ea175d493342b1f5d1128e 100644
--- a/Packages/vcs/vcs/projection.py
+++ b/Packages/vcs/vcs/projection.py
@@ -16,13 +16,15 @@ import VCS_validation_functions
 import vcs
 import copy
 
-# projection that seems to be doing a circle
-# We will probably to add some more in it as we find more that fit this
-round_projections = ['polar (non gctp)', 'stereographic',
-                     'orthographic', "ortho", ]
+# used to decide if we show longitude labels for round projections or
+# latitude labels for elliptical projections
+round_projections = ['polar (non gctp)', 'stereographic']
+elliptical_projections = ["robinson", "mollweide", 'orthographic', "ortho"]
+# projections in this list are not deformed based on the window size
+no_deformation_projections = ['polar (non gctp)', 'stereographic',
+                              'orthographic', "ortho", ]
 
 no_over_proj4_parameter_projections = round_projections+["aeqd", "lambert conformal c"]
-elliptical_projections = ["robinson", "mollweide"]
 
 
 def process_src(nm, code):
@@ -325,6 +327,12 @@ class Proj(object):
         print 'name =', self.name
         print 'type =', self.type
         # print 'parameters =',self.parameters
+
+        for att in self.attributes:
+            print att, '=', getattr(self, att)
+
+    @property
+    def attributes(self):
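+        """Names of the projection parameters that apply to this projection's type."""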
         p = []
         if self._type in [3, 4]:
             p.append('smajor')
@@ -453,8 +461,7 @@ class Proj(object):
             p.append('centerlatitude')
             p.append('falseeasting')
             p.append('falsenorthing')
-        for att in p:
-            print att, '=', getattr(self, att)
+        return p
 
     ##########################################################################
     #                                                                           #
diff --git a/Packages/vcs/vcs/queries.py b/Packages/vcs/vcs/queries.py
index dc4ffa418c8ce53f39c9657e4e05b8c4ad3d1c89..3974ff90a80071401bfa41982aab11cefc5dc05b 100644
--- a/Packages/vcs/vcs/queries.py
+++ b/Packages/vcs/vcs/queries.py
@@ -44,7 +44,6 @@ import dv3d
 import displayplot
 import projection
 import vcs
-import vcsaddons
 
 from error import vcsError
 
@@ -69,6 +68,7 @@ box=a.getboxfill('quick')  # To Modify an existing boxfill use:
 if vcs.isgraphicsmethod(box):
    box.list()
 """
+    import vcsaddons
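+    # imported here rather than at module level to avoid a circular import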
     if (isinstance(gobj, boxfill.Gfb)):
         return 1
     elif (isinstance(gobj, isofill.Gfi)):
@@ -134,6 +134,7 @@ print vcs.graphicsmethodtype(iso)         # Will print 'isofill'
 print vcs.graphicsmethodtype(ln)          # Will print None, because ln is not a
                                           #         graphics method
 """
+    import vcsaddons
     if (isinstance(gobj, boxfill.Gfb)):
         return 'boxfill'
     elif (isinstance(gobj, isofill.Gfi)):
diff --git a/Packages/vcs/vcs/template.py b/Packages/vcs/vcs/template.py
index 5e29417457418aba48e704fae1c0134be4abd2bb..adabacda08768d31ffb70607d8a52494d7b9b1fe 100644
--- a/Packages/vcs/vcs/template.py
+++ b/Packages/vcs/vcs/template.py
@@ -56,6 +56,18 @@ def _setgen(self, name, cls, value):
     setattr(self, "_%s" % name, value)
 
 
+def epsilon_gte(a, b):
+    """a >= b, using floating point epsilon value."""
+    float_epsilon = numpy.finfo(numpy.float32).eps
+    return -float_epsilon < a - b
+
+
+def epsilon_lte(a, b):
+    """a <= b, using floating point epsilon value."""
+    float_epsilon = numpy.finfo(numpy.float32).eps
+    return float_epsilon > a - b
+
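+# Illustrative check of the tolerant comparisons above:
+#   epsilon_lte(0.1 + 0.2, 0.3)   -> True
+# whereas the exact test (0.1 + 0.2) <= 0.3 is False, since
+# 0.1 + 0.2 rounds to 0.30000000000000004.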
+
 # read .scr file
 def process_src(nm, code):
     """Takes VCS script code (string) as input and generates boxfill gm from it"""
@@ -1474,7 +1486,10 @@ class P(object):
                                                       axis=" ".join(["(%s)" %
                                                                      S for S in slab.getAxisIds()])))
                         except:
-                            meanstring = 'Mean %.4g' % slab.mean()
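+                            # slab.mean() itself may fail (e.g. on fully masked
+                            # data); fall back to numpy's mean of the filled array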
+                            try:
+                                meanstring = 'Mean %.4g' % slab.mean()
+                            except:
+                                meanstring = 'Mean %.4g' % numpy.mean(slab.filled())
                     tt.string = meanstring
                 else:
                     tt.string = str(getattr(slab, s))
@@ -1848,7 +1863,7 @@ class P(object):
                    startlong])
         # Now make sure we have a legend
         if isinstance(levels[0], list):
-            # Ok these are nono contiguous levels, we will use legend only if
+            # Ok these are non-contiguous levels, we will use legend only if
             # it's a perfect match
             for i, l in enumerate(levels):
                 lt = l[0]
@@ -1873,29 +1888,31 @@ class P(object):
         else:
             if legend is None:
                 legend = vcs.mklabels(levels)
+            # We'll use the less precise float epsilon since this is just for labels
             if levels[0] < levels[1]:
-                ecompfunc = numpy.less_equal
-                compfunc = numpy.less
+                comparison = epsilon_lte
             else:
-                ecompfunc = numpy.greater_equal
-                compfunc = numpy.greater
+                comparison = epsilon_gte
+
+            def in_bounds(x):
+                return comparison(levels[0], x) and comparison(x, levels[-1])
+
             dlong = dD / (len(levels) - 1)
+
             for l in legend.keys():
-                if not compfunc(l, levels[0]) and not compfunc(levels[-1], l):
+                if in_bounds(l):
                     for i in range(len(levels) - 1):
-                        if ecompfunc(levels[i], l) and ecompfunc(
-                                l, levels[i + 1]):
-                            # Ok we're between 2 levels, let's add the legend
-                            # first let's figure out where to put it
-                            loc = i * dlong  # position at beginnig of level
-                            # Adds the distance from beginnig of level box
-                            loc += (l - levels[i]) / \
-                                (levels[i + 1] - levels[i]) * dlong
-                            loc += startlong  # Figures out the begining
-    # loc=((l-levels[0])/(levels[-1]-levels[0]))*dD+startlong
-                            Ll.append([loc, loc])
+                        # if legend key is (inclusive) between levels[i] and levels[i+1]
+                        if comparison(levels[i], l) and comparison(l, levels[i + 1]):
+                            # first let's figure out where to put the legend label
+                            location = i * dlong  # position at beginning of level
+                            # Adds the distance from beginning of level box
+                            location += (l - levels[i]) / (levels[i + 1] - levels[i]) * dlong
+                            location += startlong  # Figures out the beginning
+
+                            Ll.append([location, location])
                             Sl.append([startshrt, startshrt + dshrt])
-                            Lt.append(loc)
+                            Lt.append(location)
                             St.append(startshrt + dshrt * 1.4)
                             Tt.append(legend[l])
                             break
diff --git a/Packages/vcs/vcs/utils.py b/Packages/vcs/vcs/utils.py
index 03815f71ac7247d331f2be1d8a4595651cbfc965..6307caa187006a33f6d4e1be214ef82bdac3e78f 100644
--- a/Packages/vcs/vcs/utils.py
+++ b/Packages/vcs/vcs/utils.py
@@ -20,7 +20,6 @@ import colormap
 import json
 import os
 import tempfile
-import vcsaddons
 import cdms2
 import genutil
 import vtk
@@ -155,12 +154,24 @@ def dumpToJson(obj, fileout, skipped=[
 
 
 def getfontname(number):
+    """
+    Retrieve a font name for a given font index.
+
+    :param number: Index of the font to get the name of.
+    :type number: int
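+
+    :returns: The name of the font at the given index.
+    :rtype: str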
+    """
     if number not in vcs.elements["fontNumber"]:
         raise Exception("Error font number not existing %i" % number)
     return vcs.elements["fontNumber"][number]
 
 
 def getfontnumber(name):
+    """
+    Retrieve a font index for a given font name.
+
+    :param name: Name of the font to get the index of.
+    :type name: str
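+
+    :returns: The index of the font with the given name.
+    :rtype: int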
+    """
     for i in vcs.elements["fontNumber"]:
         if vcs.elements["fontNumber"][i] == name:
             return i
@@ -994,21 +1005,16 @@ def mklabels(vals, output='dict'):
         amax = float(numpy.ma.maximum(vals))
     #  Number of digit on the left of decimal point
     idigleft = int(numpy.ma.floor(numpy.ma.log10(amax))) + 1
+
     # Now determine the number of significant figures
     idig = 0
     for i in range(nvals):
         aa = numpy.ma.power(10., -idigleft)
         while abs(round(aa * vals[i]) - aa * vals[i]) > .000001:
             aa = aa * 10.
-        idig = numpy.ma.maximum(
-            idig,
-            numpy.ma.floor(
-                numpy.ma.log10(
-                    aa *
-                    numpy.ma.power(
-                        10.,
-                        idigleft))))
+        idig = numpy.ma.maximum(idig, numpy.ma.floor(numpy.ma.log10(aa * numpy.ma.power(10., idigleft))))
     idig = int(idig)
+
     # Now does the writing part
     lbls = []
     # First if we need an E format
@@ -1621,6 +1627,7 @@ def monotonic(x):
 
 
 def getgraphicsmethod(type, name):
+    import vcsaddons
     if type == "default":
         type = "boxfill"
     if isinstance(type, vcsaddons.core.VCSaddon):
@@ -1634,7 +1641,8 @@ def getgraphicsmethod(type, name):
     return copy_mthd
 
 
-def creategraphicsmethod(gtype, name):
+def creategraphicsmethod(gtype, gname='default', name=None):
+    import vcsaddons
     if gtype in ['isoline', 'Gi']:
         func = vcs.createisoline
     elif gtype in ['isofill', 'Gfi']:
@@ -1657,21 +1665,39 @@ def creategraphicsmethod(gtype, name):
         func = vcs.createvector
     elif gtype in ['taylordiagram', 'Gtd']:
         func = vcs.createtaylordiagram
-    elif isinstance(type, vcsaddons.core.VCSaddon):
-        func = type.creategm
+    elif gtype == '3d_scalar':
+        func = vcs.create3d_scalar
+    elif gtype == '3d_dual_scalar':
+        func = vcs.create3d_dual_scalar
+    elif gtype == '3d_vector':
+        func = vcs.create3d_vector
+    elif isinstance(gtype, vcsaddons.core.VCSaddon):
+        func = gtype.creategm
     else:
         return None
-    copy_mthd = func(source=name)
+    copy_mthd = func(name=name, source=gname)
     return copy_mthd
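+
+# Typical use of creategraphicsmethod (illustrative):
+#   gm = creategraphicsmethod('boxfill', 'default', 'mybox')
+# creates a new boxfill graphics method named 'mybox' copied from 'default'.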
 
 
+# Returns the float value for datawc_...
+# datawc_ can be a float or a cdtime.reltime
+# TODO: Investigate why datawc is converted to a cdtime.reltime
+def getDataWcValue(v):
+    if (type(v) is type(cdtime.reltime(0, 'months since 1900'))):  # noqa
+        return v.value
+    else:
+        return v
+
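+# For instance (illustrative): getDataWcValue(cdtime.reltime(6, 'months since 1900'))
+# returns 6, while a plain float such as 1.e20 is returned unchanged.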
+
 def getworldcoordinates(gm, X, Y):
     """Given a graphics method and two axes
     figures out correct world coordinates"""
     # compute the spanning in x and y, and adjust for the viewport
     wc = [0, 1, 0, 1]
     try:
-        if gm.datawc_x1 > 9.E19:
+        datawc = [getDataWcValue(gm.datawc_x1), getDataWcValue(gm.datawc_x2),
+                  getDataWcValue(gm.datawc_y1), getDataWcValue(gm.datawc_y2)]
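+        # 1.e20 is the VCS sentinel for "not set"; fall back to the axis range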
+        if numpy.isclose(datawc[0], 1.e20):
             try:
                 i = 0
                 try:
@@ -1683,8 +1709,8 @@ def getworldcoordinates(gm, X, Y):
             except:
                 wc[0] = X[:].min()
         else:
-            wc[0] = gm.datawc_x1
-        if gm.datawc_x2 > 9.E19:
+            wc[0] = datawc[0]
+        if numpy.isclose(datawc[1], 1.e20):
             try:
                 i = -1
                 try:
@@ -1696,18 +1722,18 @@ def getworldcoordinates(gm, X, Y):
             except:
                 wc[1] = X[:].max()
         else:
-            wc[1] = gm.datawc_x2
+            wc[1] = datawc[1]
     except:
         return wc
     if (((not isinstance(X, cdms2.axis.TransientAxis) and
           isinstance(Y, cdms2.axis.TransientAxis)) or
          not vcs.utils.monotonic(X[:])) and
-        numpy.allclose([gm.datawc_x1, gm.datawc_x2], 1.e20))\
+        numpy.allclose([datawc[0], datawc[1]], 1.e20))\
             or (hasattr(gm, "projection") and
                 vcs.elements["projection"][gm.projection].type != "linear"):
         wc[0] = X[:].min()
         wc[1] = X[:].max()
-    if gm.datawc_y1 > 9.E19:
+    if numpy.isclose(datawc[2], 1.e20):
         try:
             i = 0
             try:
@@ -1719,8 +1745,8 @@ def getworldcoordinates(gm, X, Y):
         except:
             wc[2] = Y[:].min()
     else:
-        wc[2] = gm.datawc_y1
-    if gm.datawc_y2 > 9.E19:
+        wc[2] = datawc[2]
+    if numpy.isclose(datawc[3], 1.e20):
         try:
             i = -1
             try:
@@ -1732,16 +1758,16 @@ def getworldcoordinates(gm, X, Y):
         except:
             wc[3] = Y[:].max()
     else:
-        wc[3] = gm.datawc_y2
+        wc[3] = datawc[3]
     if (((not isinstance(Y, cdms2.axis.TransientAxis) and
           isinstance(X, cdms2.axis.TransientAxis)) or not vcs.utils.monotonic(Y[:])) and
-        numpy.allclose([gm.datawc_y1, gm.datawc_y2], 1.e20)) \
+        numpy.allclose([datawc[2], datawc[3]], 1.e20)) \
             or (hasattr(gm, "projection") and
                 vcs.elements["projection"][
                 gm.projection].type.lower().split()[0]
                 not in ["linear", "polar"] and
-                numpy.allclose([gm.datawc_y1, gm.datawc_y2], 1.e20) and
-                numpy.allclose([gm.datawc_x1, gm.datawc_x2], 1.e20)):
+                numpy.allclose([datawc[2], datawc[3]], 1.e20) and
+                numpy.allclose([datawc[0], datawc[1]], 1.e20)):
         wc[2] = Y[:].min()
         wc[3] = Y[:].max()
     if wc[3] == wc[2]:
@@ -1793,3 +1819,39 @@ def png_read_metadata(path):
     for i in range(0, numberOfTextChunks):
         m[reader.GetTextKey(i)] = reader.GetTextValue(i)
     return m
+
+
+def download_sample_data_files(path=None):
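+    """Download the VCS sample data files listed in share/vcs/sample_files.txt
+    into path (defaults to vcs.sample_data), verifying each file against its
+    expected md5 checksum and retrying failed downloads up to three times."""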
+    import requests
+    import hashlib
+    if path is None:
+        path = vcs.sample_data
+    samples = open(os.path.join(vcs.prefix, "share", "vcs", "sample_files.txt")).readlines()
+    for sample in samples:
+        good_md5, name = sample.split()
+        local_filename = os.path.join(path, name)
+        try:
+            os.makedirs(os.path.dirname(local_filename))
+        except:
+            pass
+        attempts = 0
+        while attempts < 3:
+            md5 = hashlib.md5()
+            if os.path.exists(local_filename):
+                f = open(local_filename)
+                md5.update(f.read())
+                if md5.hexdigest() == good_md5:
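+                    # the local copy already matches the expected checksum,
+                    # so skip the download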
+                    attempts = 5
+                    continue
+            print "Downloading:", name, "in", local_filename
+            r = requests.get("http://uvcdat.llnl.gov/cdat/sample_data/" + name, stream=True)
+            with open(local_filename, 'wb') as f:
+                for chunk in r.iter_content(chunk_size=1024):
+                    if chunk:  # filter out keep-alive new chunks
+                        f.write(chunk)
+                        md5.update(chunk)
+            f.close()
+            if md5.hexdigest() == good_md5:
+                attempts = 5
+            else:
+                attempts += 1
diff --git a/Packages/vcs/vcs/vcs2vtk.py b/Packages/vcs/vcs/vcs2vtk.py
index d6c449b05ca2dac9959f95813adf172c32fb232b..90a764d0b02c3b7c17d3e5bfb9329d6937ef6146 100644
--- a/Packages/vcs/vcs/vcs2vtk.py
+++ b/Packages/vcs/vcs/vcs2vtk.py
@@ -4,12 +4,14 @@ import vtk
 import numpy
 import json
 import os
+import math
 import meshfill
 from vtk.util import numpy_support as VN
 import cdms2
 import warnings
 from projection import round_projections, no_over_proj4_parameter_projections
 from vcsvtk import fillareautils
+import sys
 import numbers
 
 f = open(os.path.join(vcs.prefix, "share", "vcs", "wmo_symbols.json"))
@@ -92,15 +94,14 @@ def setArray(grid, array, arrayName, isCellData, isScalars):
 
 
 def putMaskOnVTKGrid(data, grid, actorColor=None, cellData=True, deep=True):
-    # Ok now looking
     msk = data.mask
     mapper = None
     if msk is not numpy.ma.nomask and not numpy.allclose(msk, False):
         if actorColor is not None:
-            flatIMask = msk.astype(numpy.int).flat
+            flatIMask = msk.astype(numpy.double).flat
             if grid.IsA("vtkStructuredGrid"):
                 grid2 = vtk.vtkStructuredGrid()
-                vtkmask = numpy_to_vtk_wrapper(flatIMask, deep=deep)
+                vtkmask = numpy_to_vtk_wrapper(flatIMask, deep=deep, array_type=vtk.VTK_DOUBLE)
                 attributes2 = grid2.GetCellData() if cellData else grid2.GetPointData()
             else:
                 grid2 = vtk.vtkUnstructuredGrid()
@@ -112,26 +113,32 @@ def putMaskOnVTKGrid(data, grid, actorColor=None, cellData=True, deep=True):
                     attributes = grid.GetPointData()
                 if (attributes.GetPedigreeIds()):
                     attributes2.SetPedigreeIds(attributes.GetPedigreeIds())
-                    vtkmask = vtk.vtkIntArray()
+                    pedigreeId = attributes2.GetPedigreeIds()
+                    vtkmask = vtk.vtkDoubleArray()
                     vtkmask.SetNumberOfTuples(attributes2.GetPedigreeIds().GetNumberOfTuples())
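+                    # map each cell back to its original data index via the
+                    # pedigree ids so the mask lines up with the wrapped grid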
+                    for i in range(0, vtkmask.GetNumberOfTuples()):
+                        vtkmask.SetValue(i, flatIMask[pedigreeId.GetValue(i)])
                 else:
                     # the unstructured grid is not wrapped
-                    vtkmask = numpy_to_vtk_wrapper(flatIMask, deep=deep)
+                    vtkmask = numpy_to_vtk_wrapper(flatIMask, deep=deep, array_type=vtk.VTK_DOUBLE)
             vtkmask.SetName("scalar")
             attributes2.RemoveArray(vtk.vtkDataSetAttributes.GhostArrayName())
             attributes2.SetScalars(vtkmask)
             grid2.CopyStructure(grid)
-            setArray(grid2, flatIMask, "scalar", isCellData=cellData,
-                     isScalars=True)
             geoFilter = vtk.vtkDataSetSurfaceFilter()
             lut = vtk.vtkLookupTable()
             r, g, b, a = actorColor
-            lut.SetNumberOfTableValues(2)
             geoFilter.SetInputData(grid2)
             if not cellData:
-                lut.SetTableValue(0, r / 100., g / 100., b / 100., a / 100.)
-                lut.SetTableValue(1, r / 100., g / 100., b / 100., a / 100.)
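+                # point scalars are averaged onto cells first; LUT entry 0
+                # (no masked points) stays white, all other entries take the
+                # requested mask color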
+                pointToCell = vtk.vtkPointDataToCellData()
+                pointToCell.SetInputConnection(geoFilter.GetOutputPort())
+                geoFilter = pointToCell
+                lut.SetNumberOfTableValues(256)
+                lut.SetTableValue(0, 1., 1., 1., 1.)
+                for i in range(1, 256):
+                    lut.SetTableValue(i, r / 100., g / 100., b / 100., a / 100.)
             else:
+                lut.SetNumberOfTableValues(2)
                 lut.SetTableValue(0, r / 100., g / 100., b / 100., 0.)
                 lut.SetTableValue(1, r / 100., g / 100., b / 100., 1.)
             geoFilter.Update()
@@ -163,159 +170,76 @@ def putMaskOnVTKGrid(data, grid, actorColor=None, cellData=True, deep=True):
     return mapper
 
 
-def handleProjectionEdgeCases(projection, data):
-    # For mercator projection, latitude values of -90 or 90
-    # transformation result in infinity values. We chose -85, 85
-    # as that's the typical limit used by the community.
-    ptype = projDict.get(projection._type, projection.type)
-    if (ptype.lower() == "merc"):
-        lat = data.getLatitude()
-        if isinstance(lat, cdms2.axis.TransientAxis):
-            lat = lat[:]
-            # Reverse the latitudes incase the starting latitude is greater
-            # than the ending one
-            if lat[-1] < lat[0]:
-                lat = lat[::-1]
-        data = data(latitude=(max(-85, lat.min()), min(85, lat.max())))
-    return data
-
-
-def genGridOnPoints(data1, gm, deep=True, grid=None, geo=None,
-                    data2=None):
-    continents = False
-    projection = vcs.elements["projection"][gm.projection]
-    xm, xM, ym, yM = None, None, None, None
-    useStructuredGrid = True
-
-    data1 = handleProjectionEdgeCases(projection, data1)
-    if data2 is not None:
-        data2 = handleProjectionEdgeCases(projection, data2)
-
-    try:
-        g = data1.getGrid()
-        if grid is None:
-            x = g.getLongitude()[:]
-            y = g.getLatitude()[:]
-            xm = x[0]
-            xM = x[-1]
-            ym = y[0]
-            yM = y[-1]
-        continents = True
-        wrap = [0, 360]
-        # Ok need unstrctured grid
-        if isinstance(g, cdms2.gengrid.AbstractGenericGrid):
-            useStructuredGrid = False
-    except:
-        # hum no grid that's much easier
-        wrap = None
-        if grid is None:
-            x = data1.getAxis(-1)[:]
-            y = data1.getAxis(-2)[:]
-            xm = x[0]
-            xM = x[-1]
-            ym = y[0]
-            yM = y[-1]
-
-    if grid is None:
-        if x.ndim == 1:
-            y = y[:, numpy.newaxis] * numpy.ones(x.shape)[numpy.newaxis, :]
-            x = x[numpy.newaxis, :] * numpy.ones(y.shape)
-        x = x.flatten()
-        y = y.flatten()
-        sh = list(x.shape)
-        sh.append(1)
-        x = numpy.reshape(x, sh)
-        y = numpy.reshape(y, sh)
-        # Ok we have our points in 2D let's create unstructured points grid
-        if xm is None:
-            xm = x.min()
-        if xM is None:
-            xM = x.max()
-        if ym is None:
-            ym = y.min()
-        if yM is None:
-            yM = y.max()
-        z = numpy.zeros(x.shape)
-        m3 = numpy.concatenate((x, y), axis=1)
-        m3 = numpy.concatenate((m3, z), axis=1)
-        deep = True
-        pts = vtk.vtkPoints()
-        # Convert nupmy array to vtk ones
-        ppV = numpy_to_vtk_wrapper(m3, deep=deep)
-        pts.SetData(ppV)
-        xm, xM, ym, yM, tmp, tmp2 = pts.GetBounds()
-    else:
-        xm, xM, ym, yM, tmp, tmp2 = grid.GetPoints().GetBounds()
-        vg = grid
-    oldpts = pts
-    if geo is None:
-        bounds = pts.GetBounds()
-        xm, xM, ym, yM = [bounds[0], bounds[1], bounds[2], bounds[3]]
-        # We use zooming feature (gm.datawc) for linear and polar projections.
-        # We use wrapped coordinates for doing the projection
-        # such that parameters like the central meridian are set correctly.
-        if (gm.g_name == 'Gfm'):
-            # axes are not lon/lat for meshfill
-            wc = [gm.datawc_x1, gm.datawc_x2, gm.datawc_y1, gm.datawc_y2]
-        else:
-            wc = vcs.utils.getworldcoordinates(gm,
-                                               data1.getAxis(-1),
-                                               data1.getAxis(-2))
-        geo, geopts = project(pts, projection, getWrappedBounds(
-            wc, [xm, xM, ym, yM], wrap))
-        pts = geopts
-    # Sets the vertices into the grid
-    if grid is None:
-        if useStructuredGrid:
-            vg = vtk.vtkStructuredGrid()
-            vg.SetDimensions(data1.shape[1], data1.shape[0], 1)
-        else:
-            vg = vtk.vtkUnstructuredGrid()
-        vg.SetPoints(oldpts)
-        vg.SetPoints(pts)
-    else:
-        vg = grid
-    scalar = numpy_to_vtk_wrapper(data1.filled(0.).flat,
-                                  deep=False)
-    scalar.SetName("scalar")
-    vg.GetPointData().SetScalars(scalar)
-    out = {"vtk_backend_grid": vg,
-           "xm": xm,
-           "xM": xM,
-           "ym": ym,
-           "yM": yM,
-           "continents": continents,
-           "wrap": wrap,
-           "geo": geo,
-           "data": data1,
-           "data2": data2
-           }
-    return out
-
-
-# Returns the bounds list for 'axis'. If axis has n elements the
-# bounds list will have n+1 elements
-def getBoundsList(axis):
-    bounds = numpy.zeros(len(axis) + 1)
-    try:
-        axisBounds = axis.getBounds()
+def getBoundsList(axis, hasCellData, dualGrid):
+    '''
+    Returns the bounds list for 'axis'. If axis has n elements the
+    bounds list will have n+1 elements.
+    If there are no explicit bounds in the file we return None.
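+
+    Illustrative example: a 3-element axis [0., 1., 2.] with cell bounds
+    [[-.5, .5], [.5, 1.5], [1.5, 2.5]] yields the 4-element bounds list
+    [-.5, .5, 1.5, 2.5].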
+    '''
+    needsCellData = (hasCellData != dualGrid)
+    axisBounds = axis.getBoundsForDualGrid(dualGrid)
+    # We still have to generate bounds for non lon-lat axes, because the
+    # default in axis.py is 2 (generate bounds only for lat/lon axes).
+    # This matters for non lon-lat plots: by default numpy arrays are POINT data.
+    if (not axis.isLatitude() and not axis.isLongitude() and needsCellData):
+        axisBounds = axis.genGenericBounds()
+    if (axisBounds is not None):
+        bounds = numpy.zeros(len(axis) + 1)
         if (axis[0] < axis[-1]):
             # axis is increasing
-            bounds[:len(axis)] = axisBounds[:, 0]
-            bounds[len(axis)] = axisBounds[-1, 1]
+            if (axisBounds[0][0] < axisBounds[0][1]):
+                # interval is increasing
+                bounds[:len(axis)] = axisBounds[:, 0]
+                bounds[len(axis)] = axisBounds[-1, 1]
+            else:
+                # interval is decreasing
+                bounds[:len(axis)] = axisBounds[:, 1]
+                bounds[len(axis)] = axisBounds[-1, 0]
         else:
             # axis is decreasing
-            bounds[:len(axis)] = axisBounds[:, 1]
-            bounds[len(axis)] = axisBounds[-1, 0]
-    except Exception:
-        # No luck we have to generate bounds ourselves
-        bounds[1:-1] = (axis[:-1] + axis[1:]) / 2.
-        bounds[0] = axis[0] - (axis[1] - axis[0]) / 2.
-        bounds[-1] = axis[-1] + (axis[-1] - axis[-2]) / 2.
-    return bounds
-
-
-def genGrid(data1, data2, gm, deep=True, grid=None, geo=None):
+            if (axisBounds[0][0] < axisBounds[0][1]):
+                # interval is increasing
+                bounds[:len(axis)] = axisBounds[:, 1]
+                bounds[len(axis)] = axisBounds[-1, 0]
+            else:
+                # interval is decreasing
+                bounds[:len(axis)] = axisBounds[:, 0]
+                bounds[len(axis)] = axisBounds[-1, 1]
+        return bounds
+    else:
+        return None
+
+
+def setInfToValid(geoPoints, ghost):
+    '''
+    Set points at infinity to a point that already exists in the list
+    and hide them through the 'ghost' array.
+    Returns True if any point was infinite.
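+    For example (illustrative): a projected point (inf, 45., 0.) becomes
+    (x_valid, 45., 0.), where x_valid comes from the first finite point in
+    the list, and the point is flagged as HIDDENPOINT in 'ghost'.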
+    '''
+    anyInfinity = False
+    validPoint = [0, 0, 0]
+    for i in range(geoPoints.GetNumberOfPoints()):
+        point = geoPoints.GetPoint(i)
+        if (not math.isinf(point[0]) and not math.isinf(point[1])):
+            validPoint[0] = point[0]
+            validPoint[1] = point[1]
+            break
+    for i in range(geoPoints.GetNumberOfPoints()):
+        point = geoPoints.GetPoint(i)
+        if (math.isinf(point[0]) or math.isinf(point[1])):
+            anyInfinity = True
+            newPoint = list(point)
+            if (math.isinf(point[0])):
+                newPoint[0] = validPoint[0]
+            if (math.isinf(point[1])):
+                newPoint[1] = validPoint[1]
+            geoPoints.SetPoint(i, newPoint)
+            ghost.SetValue(i, vtk.vtkDataSetAttributes.HIDDENPOINT)
+    return anyInfinity
+
+
+def genGrid(data1, data2, gm, deep=True, grid=None, geo=None, genVectors=False,
+            dualGrid=False):
     continents = False
     wrap = None
     m3 = None
@@ -324,8 +248,6 @@ def genGrid(data1, data2, gm, deep=True, grid=None, geo=None):
     xm, xM, ym, yM = None, None, None, None
     projection = vcs.elements["projection"][gm.projection]
 
-    data1 = handleProjectionEdgeCases(projection, data1)
-
     try:  # First try to see if we can get a mesh out of this
         g = data1.getGrid()
         # Ok need unstructured grid
@@ -388,6 +310,7 @@ def genGrid(data1, data2, gm, deep=True, grid=None, geo=None):
         # Ok a simple structured grid is enough
         if grid is None:
             vg = vtk.vtkStructuredGrid()
+        hasCellData = data1.hasCellData()
         if g is not None:
             # Ok we have grid
             continents = True
@@ -406,38 +329,49 @@ def genGrid(data1, data2, gm, deep=True, grid=None, geo=None):
                 lon = data1.getAxis(-1)
                 lat = data1.getAxis(-2)
                 # Ok let's try to get the bounds
-                lon2 = getBoundsList(lon)
-                lat2 = getBoundsList(lat)
+                lon2 = getBoundsList(lon, hasCellData, dualGrid)
+                lat2 = getBoundsList(lat, hasCellData, dualGrid)
+                if (lon2 is not None and lat2 is not None):
+                    lon3 = lon2
+                    lat3 = lat2
+                else:
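+                    # No explicit bounds available: fall back to the axis
+                    # points themselves and treat the data as point data.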
+                    lon3 = lon
+                    lat3 = lat
+                    cellData = False
                 # Note that m,M is min,max for an increasing list
                 # and max,min for a decreasing list
-                xm = lon2[0]
-                xM = lon2[-1]
-                ym = lat2[0]
-                yM = lat2[-1]
-
-                lat = lat2[:, numpy.newaxis] * \
-                    numpy.ones(lon2.shape)[numpy.newaxis, :]
-                lon = lon2[numpy.newaxis,
-                           :] * numpy.ones(lat2.shape)[:,
-                                                       numpy.newaxis]
+                xm = lon3[0]
+                xM = lon3[-1]
+                ym = lat3[0]
+                yM = lat3[-1]
+
+                lat = lat3[:, numpy.newaxis] * numpy.ones(lon3.shape)[numpy.newaxis, :]
+                lon = lon3[numpy.newaxis, :] * numpy.ones(lat3.shape)[:, numpy.newaxis]
         elif grid is None:
             # No grid info from data, making one up
             data1 = cdms2.asVariable(data1)
             lon = data1.getAxis(-1)
             lat = data1.getAxis(-2)
             # Ok let's try to get the bounds
-            lon2 = getBoundsList(lon)
-            lat2 = getBoundsList(lat)
+            lon2 = getBoundsList(lon, hasCellData, dualGrid)
+            lat2 = getBoundsList(lat, hasCellData, dualGrid)
+            if (lon2 is not None and lat2 is not None):
+                lon3 = lon2
+                lat3 = lat2
+            else:
+                lon3 = lon
+                lat3 = lat
+                cellData = False
             # Note that m,M is min,max for an increasing list
             # and max,min for a decreasing list
-            xm = lon2[0]
-            xM = lon2[-1]
-            ym = lat2[0]
-            yM = lat2[-1]
-            lat = lat2[:, numpy.newaxis] * \
-                numpy.ones(lon2.shape)[numpy.newaxis, :]
-            lon = lon2[numpy.newaxis, :] * \
-                numpy.ones(lat2.shape)[:, numpy.newaxis]
+            xm = lon3[0]
+            xM = lon3[-1]
+            ym = lat3[0]
+            yM = lat3[-1]
+            lat = lat3[:, numpy.newaxis] * \
+                numpy.ones(lon3.shape)[numpy.newaxis, :]
+            lon = lon3[numpy.newaxis, :] * \
+                numpy.ones(lat3.shape)[:, numpy.newaxis]
         if grid is None:
             vg.SetDimensions(lat.shape[1], lat.shape[0], 1)
             lon = numpy.ma.ravel(lon)
@@ -461,15 +395,23 @@ def genGrid(data1, data2, gm, deep=True, grid=None, geo=None):
                     ym = lat.min()
                     yM = lat.max()
 
-    # scalar data
-    scalar = numpy_to_vtk_wrapper(data1.filled(0.).flat,
-                                  deep=False)
-    scalar.SetName("scalar")
-    gridForScalar = grid if grid else vg
+    # attribute data
+    gridForAttribute = grid if grid else vg
+    if genVectors:
+        attribute = generateVectorArray(data1, data2, gridForAttribute)
+    else:
+        attribute = numpy_to_vtk_wrapper(data1.filled(0.).flat,
+                                         deep=False)
+        attribute.SetName("scalar")
     if cellData:
-        gridForScalar.GetCellData().SetScalars(scalar)
+        attributes = gridForAttribute.GetCellData()
     else:
-        gridForScalar.GetPointData().SetScalars(scalar)
+        attributes = gridForAttribute.GetPointData()
+    if genVectors:
+        attributes.SetVectors(attribute)
+    else:
+        attributes.SetScalars(attribute)
+
     if grid is None:
         # First create the points/vertices (in vcs terms)
         pts = vtk.vtkPoints()
@@ -489,8 +431,8 @@ def genGrid(data1, data2, gm, deep=True, grid=None, geo=None):
             # wrapping
             pedigreeId = vtk.vtkIntArray()
             pedigreeId.SetName("PedigreeIds")
-            pedigreeId.SetNumberOfTuples(scalar.GetNumberOfTuples())
-            for i in range(0, scalar.GetNumberOfTuples()):
+            pedigreeId.SetNumberOfTuples(attribute.GetNumberOfTuples())
+            for i in range(0, attribute.GetNumberOfTuples()):
                 pedigreeId.SetValue(i, i)
             if cellData:
                 vg.GetCellData().SetPedigreeIds(pedigreeId)
@@ -516,6 +458,25 @@ def genGrid(data1, data2, gm, deep=True, grid=None, geo=None):
                                                data1.getAxis(-2))
         geo, geopts = project(pts, projection, getWrappedBounds(
             wc, [xm, xM, ym, yM], wrap))
+        # proj4 returns inf for points that are not visible. Set those to a valid point
+        # and hide them.
+        ghost = vg.AllocatePointGhostArray()
+        if (setInfToValid(geopts, ghost)):
+            # if there are hidden points, we recompute the bounds
+            xm = ym = sys.float_info.max
+            xM = yM = - sys.float_info.max
+            for i in range(pts.GetNumberOfPoints()):
+                if (ghost.GetValue(i) & vtk.vtkDataSetAttributes.HIDDENPOINT == 0):
+                    # point not hidden
+                    p = pts.GetPoint(i)
+                    if (p[0] < xm):
+                        xm = p[0]
+                    if (p[0] > xM):
+                        xM = p[0]
+                    if (p[1] < ym):
+                        ym = p[1]
+                    if (p[1] > yM):
+                        yM = p[1]
         # Sets the vertices into the grid
         vg.SetPoints(geopts)
     else:
@@ -533,7 +494,8 @@ def genGrid(data1, data2, gm, deep=True, grid=None, geo=None):
            "wrap": wrap,
            "geo": geo,
            "cellData": cellData,
-           "data": data1
+           "data": data1,
+           "data2": data2
            }
     return out
 
@@ -628,24 +590,42 @@ def apply_proj_parameters(pd, projection, x1, x2, y1, y2):
         else:
             pd.SetOptionalParameter("over", "false")
             setProjectionParameters(pd, projection)
-        if (hasattr(projection, 'centralmeridian') and
-                numpy.allclose(projection.centralmeridian, 1e+20)):
-            pd.SetCentralMeridian(float(x1 + x2) / 2.0)
-        if (hasattr(projection, 'centerlongitude') and
-                numpy.allclose(projection.centerlongitude, 1e+20)):
-            pd.SetOptionalParameter("lon_0", str(float(x1 + x2) / 2.0))
-        if (hasattr(projection, 'originlatitude') and
-                numpy.allclose(projection.originlatitude, 1e+20)):
-            pd.SetOptionalParameter("lat_0", str(float(y1 + y2) / 2.0))
-        if (hasattr(projection, 'centerlatitude') and
-                numpy.allclose(projection.centerlatitude, 1e+20)):
-            pd.SetOptionalParameter("lat_0", str(float(y1 + y2) / 2.0))
-        if (hasattr(projection, 'standardparallel1') and
-                numpy.allclose(projection.standardparallel1, 1.e20)):
-            pd.SetOptionalParameter('lat_1', str(min(y1, y2)))
-        if (hasattr(projection, 'standardparallel2') and
-                numpy.allclose(projection.standardparallel2, 1.e20)):
-            pd.SetOptionalParameter('lat_2', str(max(y1, y2)))
+        if (hasattr(projection, 'centralmeridian')):
+            if (numpy.allclose(projection.centralmeridian, 1e+20)):
+                centralmeridian = float(x1 + x2) / 2.0
+            else:
+                centralmeridian = projection.centralmeridian
+            pd.SetCentralMeridian(centralmeridian)
+        if (hasattr(projection, 'centerlongitude')):
+            if (numpy.allclose(projection.centerlongitude, 1e+20)):
+                centerlongitude = float(x1 + x2) / 2.0
+            else:
+                centerlongitude = projection.centerlongitude
+            pd.SetOptionalParameter("lon_0", str(centerlongitude))
+        if (hasattr(projection, 'originlatitude')):
+            if (numpy.allclose(projection.originlatitude, 1e+20)):
+                originlatitude = float(y1 + y2) / 2.0
+            else:
+                originlatitude = projection.originlatitude
+            pd.SetOptionalParameter("lat_0", str(originlatitude))
+        if (hasattr(projection, 'centerlatitude')):
+            if (numpy.allclose(projection.centerlatitude, 1e+20)):
+                centerlatitude = float(y1 + y2) / 2.0
+            else:
+                centerlatitude = projection.centerlatitude
+            pd.SetOptionalParameter("lat_0", str(centerlatitude))
+        if (hasattr(projection, 'standardparallel1')):
+            if (numpy.allclose(projection.standardparallel1, 1.e20)):
+                standardparallel1 = min(y1, y2)
+            else:
+                standardparallel1 = projection.standardparallel1
+            pd.SetOptionalParameter('lat_1', str(standardparallel1))
+        if (hasattr(projection, 'standardparallel2')):
+            if (numpy.allclose(projection.standardparallel2, 1.e20)):
+                standardparallel2 = max(y1, y2)
+            else:
+                standardparallel2 = projection.standardparallel2
+            pd.SetOptionalParameter('lat_2', str(standardparallel2))
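+        # Note: 1e20 is the vcs sentinel for "not set". Illustrative: with
+        # centralmeridian left at 1e20 and x1, x2 = -180, 180, the central
+        # meridian defaults to (x1 + x2) / 2 = 0.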
 
 
 def projectArray(w, projection, wc, geo=None):
@@ -667,9 +647,9 @@ def projectArray(w, projection, wc, geo=None):
 
     for i in range(0, w.GetNumberOfTuples()):
         tuple = [0, 0, 0]
-        w.GetTupleValue(i, tuple)
+        w.GetTypedTuple(i, tuple)
         geo.TransformPoint(tuple, tuple)
-        w.SetTupleValue(i, tuple)
+        w.SetTypedTuple(i, tuple)
 
 
 # Geo projection
@@ -880,6 +860,7 @@ def doWrap(Act, wc, wrap=[0., 360], fastClip=True):
     if wrap is None:
         return Act
     Mapper = Act.GetMapper()
+    Mapper.Update()
     data = Mapper.GetInput()
     # ensure that GLOBALIDS are not removed by the append filter
     attributes = data.GetCellData()
@@ -1142,7 +1123,7 @@ def prepTextProperty(p, winSize, to="default", tt="default", cmap=None,
 
 
 def genTextActor(renderer, string=None, x=None, y=None,
-                 to='default', tt='default', cmap=None):
+                 to='default', tt='default', cmap=None, geoBounds=None, geo=None):
     if isinstance(to, str):
         to = vcs.elements["textorientation"][to]
     if isinstance(tt, str):
@@ -1166,21 +1147,8 @@ def genTextActor(renderer, string=None, x=None, y=None,
     sz = renderer.GetRenderWindow().GetSize()
     actors = []
     pts = vtk.vtkPoints()
-    geo = None
     if vcs.elements["projection"][tt.projection].type != "linear":
-            # Need to figure out new WC
-        Npts = 20
-        for i in range(Npts + 1):
-            X = tt.worldcoordinate[
-                0] + float(i) / Npts * (tt.worldcoordinate[1] -
-                                        tt.worldcoordinate[0])
-            for j in range(Npts + 1):
-                Y = tt.worldcoordinate[
-                    2] + float(j) / Npts * (tt.worldcoordinate[3] -
-                                            tt.worldcoordinate[2])
-                pts.InsertNextPoint(X, Y, 0.)
-        geo, pts = project(pts, tt.projection, tt.worldcoordinate, geo=None)
-        wc = pts.GetBounds()[:4]
+        wc = geoBounds[:4]
         # renderer.SetViewport(tt.viewport[0],tt.viewport[2],tt.viewport[1],tt.viewport[3])
         renderer.SetWorldPoint(wc)
 
@@ -1190,8 +1158,8 @@ def genTextActor(renderer, string=None, x=None, y=None,
         prepTextProperty(p, sz, to, tt, cmap)
         pts = vtk.vtkPoints()
         pts.InsertNextPoint(x[i], y[i], 0.)
-        if geo is not None:
-            geo, pts = project(pts, tt.projection, tt.worldcoordinate, geo=geo)
+        if vcs.elements["projection"][tt.projection].type != "linear":
+            _, pts = project(pts, tt.projection, tt.worldcoordinate, geo=geo)
             X, Y, tz = pts.GetPoint(0)
             X, Y = world2Renderer(renderer, X, Y, tt.viewport, wc)
         else:
@@ -1328,9 +1296,9 @@ def prepFillarea(renWin, farea, cmap=None):
             if opacity is not None:
                 color[-1] = opacity
             color = [int(C / 100. * 255) for C in color]
-            colors.SetTupleValue(cellId, color)
+            colors.SetTypedTuple(cellId, color)
         else:
-            color_arr.SetTupleValue(cellId, [255, 255, 255, 0])
+            color_arr.SetTypedTuple(cellId, [255, 255, 255, 0])
 
         if st != "solid":
             # Patterns/hatches support
@@ -1651,16 +1619,16 @@ def __build_ld__():
 
 def stippleLine(prop, line_type):
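+    # Each stipple pattern is a 16-bit on/off mask for the pixels along the
+    # line: 1 bits draw and 0 bits skip, so all ones gives a solid line and
+    # alternating bits give a dotted one.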
     if line_type == 'long-dash':
-        prop.SetLineStipplePattern(int('1111111100000000', 2))
+        prop.SetLineStipplePattern(int('0000111111111111', 2))
         prop.SetLineStippleRepeatFactor(1)
     elif line_type == 'dot':
-        prop.SetLineStipplePattern(int('1010101010101010', 2))
+        prop.SetLineStipplePattern(int('0101010101010101', 2))
         prop.SetLineStippleRepeatFactor(1)
     elif line_type == 'dash':
-        prop.SetLineStipplePattern(int('1111000011110000', 2))
+        prop.SetLineStipplePattern(int('0001111100011111', 2))
         prop.SetLineStippleRepeatFactor(1)
     elif line_type == 'dash-dot':
-        prop.SetLineStipplePattern(int('0011110000110011', 2))
+        prop.SetLineStipplePattern(int('0101111101011111', 2))
         prop.SetLineStippleRepeatFactor(1)
     elif line_type == 'solid':
         prop.SetLineStipplePattern(int('1111111111111111', 2))
@@ -1738,7 +1706,7 @@ def prepLine(renWin, line, cmap=None):
                     pts.InsertNextPoint(tmpx, tmpy, 0.)
                     n2 += 1
         for j in range(n2):
-            colors.InsertNextTupleValue(vtk_color)
+            colors.InsertNextTypedTuple(vtk_color)
             l = vtk.vtkLine()
             l.GetPointIds().SetId(0, j + point_offset)
             l.GetPointIds().SetId(1, j + point_offset + 1)
@@ -1853,36 +1821,17 @@ def generateVectorArray(data1, data2, vtk_grid):
     w = numpy.concatenate((u, v), axis=1)
     w = numpy.concatenate((w, z), axis=1)
 
-    # HACK The grid returned by vtk2vcs.genGrid is not the same size as the
-    # data array. I'm not sure where the issue is...for now let's just zero-pad
-    # data array so that we can at least test rendering until Charles gets
-    # back from vacation:
-    wLen = len(w)
-    numPts = vtk_grid.GetNumberOfPoints()
-    if wLen != numPts:
-        warnings.warn("!!! Warning during vector plotting: "
-                      "Number of points does not "
-                      "match the number of vectors to be "
-                      "glyphed (%s points vs %s "
-                      "vectors). The vectors will be "
-                      "padded/truncated to match for "
-                      "rendering purposes, but the resulting "
-                      "image should not be "
-                      "trusted." % (numPts, wLen))
-        newShape = (numPts,) + w.shape[1:]
-        w = numpy.ma.resize(w, newShape)
-
     w = numpy_to_vtk_wrapper(w, deep=False)
-    w.SetName("vectors")
+    w.SetName("vector")
     return w
 
 
 def vtkIterate(iterator):
     iterator.InitTraversal()
-    obj = iterator.GetNextItem()
+    obj = iterator.GetNextItemAsObject()
     while obj is not None:
         yield obj
-        obj = iterator.GetNextItem()
+        obj = iterator.GetNextItemAsObject()
 
 
 def getPlottingBounds(gmbounds, databounds, geo):
diff --git a/Packages/vcs/vcs/vcsvtk/boxfillpipeline.py b/Packages/vcs/vcs/vcsvtk/boxfillpipeline.py
index 7726521728c320fc47d2f969ff9bf1cf209dc095..005241b4a09b87fd380d6042a7202a7a2bb76284 100644
--- a/Packages/vcs/vcs/vcsvtk/boxfillpipeline.py
+++ b/Packages/vcs/vcs/vcsvtk/boxfillpipeline.py
@@ -24,6 +24,7 @@ class BoxfillPipeline(Pipeline2D):
         self._contourLabels = None
         self._mappers = None
         self._customBoxfillArgs = {}
+        self._needsCellData = True
 
     def _updateScalarData(self):
         """Overrides baseclass implementation."""
@@ -47,57 +48,11 @@ class BoxfillPipeline(Pipeline2D):
 
     def _updateContourLevelsAndColorsForBoxfill(self):
         """Set contour information for a standard boxfill."""
-        # Compute levels
-        nlev = (self._gm.color_2 - self._gm.color_1) + 1
-        if numpy.allclose(self._gm.level_1, 1.e20) or \
-           numpy.allclose(self._gm.level_2, 1.e20):
-            self._contourLevels = vcs.mkscale(self._scalarRange[0],
-                                              self._scalarRange[1])
-            if len(self._contourLevels) == 1:  # constant value ?
-                self._contourLevels = [self._contourLevels[0],
-                                       self._contourLevels[0] + .00001]
-            self._contourLabels = vcs.mklabels(self._contourLevels)
-            dx = (self._contourLevels[-1] - self._contourLevels[0]) / nlev
-            self._contourLevels = numpy.arange(self._contourLevels[0],
-                                               self._contourLevels[-1] + dx,
-                                               dx)
-        else:
-            if self._gm.boxfill_type == "log10":
-                levslbls = vcs.mkscale(numpy.ma.log10(self._gm.level_1),
-                                       numpy.ma.log10(self._gm.level_2))
-                self._contourLevels = vcs.mkevenlevels(
-                    numpy.ma.log10(self._gm.level_1),
-                    numpy.ma.log10(self._gm.level_2), nlev=nlev)
-            else:
-                levslbls = vcs.mkscale(self._gm.level_1, self._gm.level_2)
-                self._contourLevels = vcs.mkevenlevels(self._gm.level_1,
-                                                       self._gm.level_2,
-                                                       nlev=nlev)
-            if len(self._contourLevels) > 25:
-                # Too many colors/levels need to prettyfy this for legend
-                self._contourLabels = vcs.mklabels(levslbls)
-                # Make sure extremes are in
-                legd2 = vcs.mklabels([self._contourLevels[0],
-                                      self._contourLevels[-1]])
-                self._contourLabels.update(legd2)
-            else:
-                self._contourLabels = vcs.mklabels(self._contourLevels)
-            if self._gm.boxfill_type == "log10":
-                logLabels = {}
-                for key in self._contourLabels.keys():
-                    value = self._contourLabels[key]
-                    newKey = float(numpy.ma.log10(value))
-                    logLabels[newKey] = value
-                self._contourLabels = logLabels
-
+        self._contourLevels = self._gm.getlevels(self._scalarRange[0], self._scalarRange[1])
+        self._contourLabels = self._gm.getlegendlabels(self._contourLevels)
         # Use consecutive colors:
         self._contourColors = range(self._gm.color_1, self._gm.color_2 + 1)
 
-    def _createPolyDataFilter(self):
-        """Overrides baseclass implementation."""
-        self._vtkPolyDataFilter = vtk.vtkDataSetSurfaceFilter()
-        self._vtkPolyDataFilter.SetInputData(self._vtkDataSet)
-
     def _plotInternal(self):
         """Overrides baseclass implementation."""
         # Special case for custom boxfills:
@@ -175,8 +130,6 @@ class BoxfillPipeline(Pipeline2D):
                 geo=self._vtkGeoTransform,
                 priority=self._template.data.priority,
                 create_renderer=(dataset_renderer is None))
-        self._resultDict['dataset_renderer'] = dataset_renderer
-        self._resultDict['dataset_scale'] = (xScale, yScale)
 
         for act in patternActors:
             if self._vtkGeoTransform is None:
@@ -199,7 +152,8 @@ class BoxfillPipeline(Pipeline2D):
             z = None
         kwargs = {"vtk_backend_grid": self._vtkDataSet,
                   "dataset_bounds": self._vtkDataSetBounds,
-                  "plotting_dataset_bounds": plotting_dataset_bounds}
+                  "plotting_dataset_bounds": plotting_dataset_bounds,
+                  "vtk_backend_geo": self._vtkGeoTransform}
         if ("ratio_autot_viewport" in self._resultDict):
             kwargs["ratio_autot_viewport"] = vp
         self._resultDict.update(self._context().renderTemplate(
@@ -256,7 +210,6 @@ class BoxfillPipeline(Pipeline2D):
                 vp, self._template.data.priority,
                 vtk_backend_grid=self._vtkDataSet,
                 dataset_bounds=self._vtkDataSetBounds)
-            self._resultDict['continents_renderer'] = continents_renderer
 
     def _plotInternalBoxfill(self):
         """Implements the logic to render a non-custom boxfill."""
@@ -286,7 +239,7 @@ class BoxfillPipeline(Pipeline2D):
 
         # Colortable bit
         # make sure length match
-        numLevels = len(self._contourLevels)
+        numLevels = len(self._contourLevels) - 1
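+        # (the contour levels are band edges: N levels bound N - 1 color bands)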
         while len(self._contourColors) < numLevels:
             self._contourColors.append(self._contourColors[-1])
 
diff --git a/Packages/vcs/vcs/vcsvtk/isofillpipeline.py b/Packages/vcs/vcs/vcsvtk/isofillpipeline.py
index e2ee5360ef183cf0d2247ba4f2d87da43dc760e5..273376c0909b696e7c80f78c9ae569bc1a741350 100644
--- a/Packages/vcs/vcs/vcsvtk/isofillpipeline.py
+++ b/Packages/vcs/vcs/vcsvtk/isofillpipeline.py
@@ -13,44 +13,19 @@ class IsofillPipeline(Pipeline2D):
 
     def __init__(self, gm, context_):
         super(IsofillPipeline, self).__init__(gm, context_)
-
-    def _updateVTKDataSet(self):
-        """Overrides baseclass implementation."""
-        # Force point data for isoline/isofill
-        genGridDict = vcs2vtk.genGridOnPoints(self._data1, self._gm,
-                                              deep=False,
-                                              grid=self._vtkDataSet,
-                                              geo=self._vtkGeoTransform)
-        genGridDict["cellData"] = False
-        self._data1 = genGridDict["data"]
-        self._updateFromGenGridDict(genGridDict)
+        self._needsCellData = False
 
     def _updateContourLevelsAndColors(self):
         self._updateContourLevelsAndColorsGeneric()
 
-    def _createPolyDataFilter(self):
-        """Overrides baseclass implementation."""
-        self._vtkPolyDataFilter = vtk.vtkDataSetSurfaceFilter()
-        if self._useCellScalars:
-            # Sets data to point instead of just cells
-            c2p = vtk.vtkCellDataToPointData()
-            c2p.SetInputData(self._vtkDataSet)
-            c2p.Update()
-            # For contouring duplicate points seem to confuse it
-            self._vtkPolyDataFilter.SetInputConnection(c2p.GetOutputPort())
-        else:
-            self._vtkPolyDataFilter.SetInputData(self._vtkDataSet)
-        self._vtkPolyDataFilter.Update()
-        self._resultDict["vtk_backend_filter"] = self._vtkPolyDataFilter
-
     def _plotInternal(self):
         """Overrides baseclass implementation."""
 
-        prepedContours = self._prepContours()
-        tmpLevels = prepedContours["tmpLevels"]
-        tmpIndices = prepedContours["tmpIndices"]
-        tmpColors = prepedContours["tmpColors"]
-        tmpOpacities = prepedContours["tmpOpacities"]
+        preppedContours = self._prepContours()
+        tmpLevels = preppedContours["tmpLevels"]
+        tmpIndices = preppedContours["tmpIndices"]
+        tmpColors = preppedContours["tmpColors"]
+        tmpOpacities = preppedContours["tmpOpacities"]
         style = self._gm.fillareastyle
 
         luts = []
@@ -92,7 +67,13 @@ class IsofillPipeline(Pipeline2D):
                     lut.SetTableValue(j, 1., 1., 1., 0.)
             luts.append([lut, [0, len(l) - 1, True]])
             mapper.SetLookupTable(lut)
-            mapper.SetScalarRange(0, len(l) - 1)
+            minRange = 0
+            maxRange = len(l) - 1
+            if (i == 0 and self._scalarRange[0] < l[0]):
+                # band 0 is from self._scalarRange[0] to l[0]
+                # we don't show band 0
+                minRange += 1
+            mapper.SetScalarRange(minRange, maxRange)
             mapper.SetScalarModeToUseCellData()
             mappers.append(mapper)
 
@@ -183,8 +164,6 @@ class IsofillPipeline(Pipeline2D):
                 geo=self._vtkGeoTransform,
                 priority=self._template.data.priority,
                 create_renderer=(dataset_renderer is None))
-        self._resultDict['dataset_renderer'] = dataset_renderer
-        self._resultDict['dataset_scale'] = (xScale, yScale)
         for act in patternActors:
             self._context().fitToViewport(
                 act, vp,
@@ -203,7 +182,8 @@ class IsofillPipeline(Pipeline2D):
             z = None
         kwargs = {"vtk_backend_grid": self._vtkDataSet,
                   "dataset_bounds": self._vtkDataSetBounds,
-                  "plotting_dataset_bounds": plotting_dataset_bounds}
+                  "plotting_dataset_bounds": plotting_dataset_bounds,
+                  "vtk_backend_geo": self._vtkGeoTransform}
         if ("ratio_autot_viewport" in self._resultDict):
             kwargs["ratio_autot_viewport"] = vp
         self._resultDict.update(self._context().renderTemplate(
@@ -251,4 +231,3 @@ class IsofillPipeline(Pipeline2D):
                 vp, self._template.data.priority,
                 vtk_backend_grid=self._vtkDataSet,
                 dataset_bounds=self._vtkDataSetBounds)
-            self._resultDict['continents_renderer'] = continents_renderer
diff --git a/Packages/vcs/vcs/vcsvtk/isolinepipeline.py b/Packages/vcs/vcs/vcsvtk/isolinepipeline.py
index af79a45b862c6329ae9151f640936c5666233a66..1204d973cc38c9ae2c4fb20350b6905f142fd967 100644
--- a/Packages/vcs/vcs/vcsvtk/isolinepipeline.py
+++ b/Packages/vcs/vcs/vcsvtk/isolinepipeline.py
@@ -12,21 +12,15 @@ class IsolinePipeline(Pipeline2D):
 
     def __init__(self, gm, context_):
         super(IsolinePipeline, self).__init__(gm, context_)
+        self._needsCellData = False
 
-    def _updateVTKDataSet(self):
-        """Overrides baseclass implementation."""
-        # Force point data for isoline/isofill
-        genGridDict = vcs2vtk.genGridOnPoints(self._data1, self._gm,
-                                              deep=False,
-                                              grid=self._vtkDataSet,
-                                              geo=self._vtkGeoTransform)
-        genGridDict["cellData"] = False
-        self._data1 = genGridDict["data"]
-        self._updateFromGenGridDict(genGridDict)
-
-        data = vcs2vtk.numpy_to_vtk_wrapper(self._data1.filled(0.).flat,
-                                            deep=False)
-        self._vtkDataSet.GetPointData().SetScalars(data)
+    def extendAttribute(self, attributes, default):
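+        """Pad 'attributes' with its last value (or 'default' when empty)
+        until it has one entry per contour level."""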
+        if len(attributes) < len(self._contourLevels):
+            if (len(attributes) == 0):
+                attributeValue = default
+            else:
+                attributeValue = attributes[-1]
+            attributes += [attributeValue] * (len(self._contourLevels) - len(attributes))
 
     def _updateContourLevelsAndColors(self):
         """Overrides baseclass implementation."""
@@ -45,23 +39,8 @@ class IsolinePipeline(Pipeline2D):
             else:
                 if numpy.allclose(self._contourLevels[0], 1.e20):
                     self._contourLevels[0] = -1.e20
-
-        # Contour colors:
         self._contourColors = self._gm.linecolors
-
-    def _createPolyDataFilter(self):
-        """Overrides baseclass implementation."""
-        self._vtkPolyDataFilter = vtk.vtkDataSetSurfaceFilter()
-        if self._useCellScalars:
-            # Sets data to point instead of just cells
-            c2p = vtk.vtkCellDataToPointData()
-            c2p.SetInputData(self._vtkDataSet)
-            c2p.Update()
-            # For contouring duplicate points seem to confuse it
-            self._vtkPolyDataFilter.SetInputConnection(c2p.GetOutputPort())
-        else:
-            self._vtkPolyDataFilter.SetInputData(self._vtkDataSet)
-        self._resultDict["vtk_backend_filter"] = self._vtkPolyDataFilter
+        self.extendAttribute(self._contourColors, default='black')
 
     def _plotInternal(self):
         """Overrides baseclass implementation."""
@@ -71,15 +50,10 @@ class IsolinePipeline(Pipeline2D):
         tmpLineStyles = []
 
         linewidth = self._gm.linewidths
-        linestyle = self._gm.line
+        self.extendAttribute(linewidth, default=1.0)
 
-        if len(linewidth) < len(self._contourLevels):
-            # fill up the line width values
-            linewidth += [1.0] * (len(self._contourLevels) - len(linewidth))
-
-        if len(linestyle) < len(self._contourLevels):
-            # fill up the line style values
-            linestyle += ['solid'] * (len(self._contourLevels) - len(linestyle))
+        linestyle = self._gm.line
+        self.extendAttribute(linestyle, default='solid')
 
         plotting_dataset_bounds = self.getPlottingBounds()
         x1, x2, y1, y2 = plotting_dataset_bounds
@@ -97,20 +71,14 @@ class IsolinePipeline(Pipeline2D):
                 if W == linewidth[i] and S == linestyle[i]:
                     # Ok same style and width, lets keep going
                     L.append(l)
-                    if i >= len(self._contourColors):
-                        C.append(self._contourColors[-1])
-                    else:
-                        C.append(self._contourColors[i])
+                    C.append(self._contourColors[i])
                 else:
                     tmpLevels.append(L)
                     tmpColors.append(C)
                     tmpLineWidths.append(W)
                     tmpLineStyles.append(S)
                     L = [l]
-                    if i >= len(self._contourColors):
-                        C = [self._contourColors[-1]]
-                    else:
-                        C = [self._contourColors[i]]
+                    C = [self._contourColors[i]]
                     W = linewidth[i]
                     S = linestyle[i]
 
@@ -164,7 +132,7 @@ class IsolinePipeline(Pipeline2D):
             numLevels = len(l)
 
             cot = vtk.vtkContourFilter()
-            if self._useCellScalars:
+            if self._hasCellData:
                 cot.SetInputConnection(self._vtkPolyDataFilter.GetOutputPort())
             else:
                 cot.SetInputData(self._vtkDataSet)
@@ -172,7 +140,6 @@ class IsolinePipeline(Pipeline2D):
 
             for n in range(numLevels):
                 cot.SetValue(n, l[n])
-            cot.SetValue(numLevels, l[-1])
             # TODO remove update
             cot.Update()
 
@@ -301,8 +268,6 @@ class IsolinePipeline(Pipeline2D):
                 create_renderer=(dataset_renderer is None))
 
             countLevels += len(l)
-        self._resultDict['dataset_renderer'] = dataset_renderer
-        self._resultDict['dataset_scale'] = (xScale, yScale)
         if len(textprops) > 0:
             self._resultDict["vtk_backend_contours_labels_text_properties"] = \
                 textprops
@@ -342,7 +307,8 @@ class IsolinePipeline(Pipeline2D):
             z = None
         kwargs = {"vtk_backend_grid": self._vtkDataSet,
                   "dataset_bounds": self._vtkDataSetBounds,
-                  "plotting_dataset_bounds": plotting_dataset_bounds}
+                  "plotting_dataset_bounds": plotting_dataset_bounds,
+                  "vtk_backend_geo": self._vtkGeoTransform}
         if ("ratio_autot_viewport" in self._resultDict):
             kwargs["ratio_autot_viewport"] = vp
         self._resultDict.update(self._context().renderTemplate(
@@ -360,4 +326,3 @@ class IsolinePipeline(Pipeline2D):
                 vp, self._template.data.priority,
                 vtk_backend_grid=self._vtkDataSet,
                 dataset_bounds=self._vtkDataSetBounds)
-            self._resultDict['continents_renderer'] = continents_renderer
diff --git a/Packages/vcs/vcs/vcsvtk/meshfillpipeline.py b/Packages/vcs/vcs/vcsvtk/meshfillpipeline.py
index 90c06718d2d5a0b8a45e39acc625e7610d537cfe..49320aff932b395c8c82baa9cf582f31a62a597f 100644
--- a/Packages/vcs/vcs/vcsvtk/meshfillpipeline.py
+++ b/Packages/vcs/vcs/vcsvtk/meshfillpipeline.py
@@ -15,6 +15,7 @@ class MeshfillPipeline(Pipeline2D):
         super(MeshfillPipeline, self).__init__(gm, context_)
 
         self._patternActors = []
+        self._needsCellData = True
 
     def _updateScalarData(self):
         """Overrides baseclass implementation."""
@@ -25,16 +26,6 @@ class MeshfillPipeline(Pipeline2D):
     def _updateContourLevelsAndColors(self):
         self._updateContourLevelsAndColorsGeneric()
 
-    def _createPolyDataFilter(self):
-        """Overrides baseclass implementation."""
-        self._vtkPolyDataFilter = vtk.vtkDataSetSurfaceFilter()
-        if self._useCellScalars:
-            self._vtkPolyDataFilter.SetInputData(self._vtkDataSet)
-        else:
-            p2c = vtk.vtkPointDataToCellData()
-            p2c.SetInputData(self._vtkDataSet)
-            self._vtkPolyDataFilter.SetInputConnection(p2c.GetOutputPort())
-
     def _plotInternal(self):
 
         prepedContours = self._prepContours()
@@ -204,8 +195,6 @@ class MeshfillPipeline(Pipeline2D):
                 geo=self._vtkGeoTransform,
                 priority=self._template.data.priority,
                 create_renderer=(dataset_renderer is None))
-        self._resultDict['dataset_renderer'] = dataset_renderer
-        self._resultDict['dataset_scale'] = (xScale, yScale)
         for act in self._patternActors:
             if self._vtkGeoTransform is None:
                 # If using geofilter on wireframed does not get wrapped not sure
@@ -221,7 +210,8 @@ class MeshfillPipeline(Pipeline2D):
         self._resultDict["vtk_backend_actors"] = actors
         kwargs = {"vtk_backend_grid": self._vtkDataSet,
                   "dataset_bounds": self._vtkDataSetBounds,
-                  "plotting_dataset_bounds": plotting_dataset_bounds}
+                  "plotting_dataset_bounds": plotting_dataset_bounds,
+                  "vtk_backend_geo": self._vtkGeoTransform}
         if ("ratio_autot_viewport" in self._resultDict):
             kwargs["ratio_autot_viewport"] = vp
         self._template.plot(self._context().canvas, self._data1, self._gm,
@@ -279,7 +269,6 @@ class MeshfillPipeline(Pipeline2D):
                 vp, self._template.data.priority,
                 vtk_backend_grid=self._vtkDataSet,
                 dataset_bounds=self._vtkDataSetBounds)
-            self._resultDict['continents_renderer'] = continents_renderer
 
     def getPlottingBounds(self):
         """gm.datawc if it is set or dataset_bounds
diff --git a/Packages/vcs/vcs/vcsvtk/pipeline2d.py b/Packages/vcs/vcs/vcsvtk/pipeline2d.py
index 4d370db8aebe87ca6b037739ed1bde71b4a729d3..dc12f3f5cdb050a01b5009a99e2a9731806b14f5 100644
--- a/Packages/vcs/vcs/vcsvtk/pipeline2d.py
+++ b/Packages/vcs/vcs/vcsvtk/pipeline2d.py
@@ -1,9 +1,10 @@
 from .pipeline import Pipeline
 from .. import vcs2vtk
 
-import vcs
-import numpy
 import fillareautils
+import numpy
+import vcs
+import vtk
 import warnings
 
 
@@ -45,8 +46,11 @@ class IPipeline2D(Pipeline):
         - _useContinents: Whether or not to plot continents.
         - _dataWrapModulo: Wrap modulo as [YMax, XMax], in degrees. 0 means
             'no wrapping'.
-        - _useCellScalars: True if data is applied to cell, false if data is
+        - _hasCellData: True if data is applied to cells, false if data is
             applied to points.
+        - _needsCellData: True if the plot needs cell scalars, false if
+            the plot needs point scalars.
+        - _needsVectors: True if the plot needs vectors, false if it needs scalars.
         - _scalarRange: The range of _data1 as tuple(float min, float max)
         - _maskedDataMapper: The mapper used to render masked data.
     """
@@ -74,7 +78,9 @@ class IPipeline2D(Pipeline):
         self._colorMap = None
         self._useContinents = None
         self._dataWrapModulo = None
-        self._useCellScalars = None
+        self._hasCellData = None
+        self._needsCellData = None
+        self._needsVectors = False
         self._scalarRange = None
         self._maskedDataMapper = None
 
@@ -82,7 +88,7 @@ class IPipeline2D(Pipeline):
         """Create _data1 and _data2 from _originalData1 and _originalData2."""
         raise NotImplementedError("Missing override.")
 
-    def _updateVTKDataSet(self):
+    def _updateVTKDataSet(self, plotBasedDualGrid):
         """Apply the vcs data to _vtkDataSet, creating it if necessary."""
         raise NotImplementedError("Missing override.")
 
@@ -272,10 +278,13 @@ class Pipeline2D(IPipeline2D):
 
         # Preprocess the input scalar data:
         self._updateScalarData()
+        self._min = self._data1.min()
+        self._max = self._data1.max()
         self._scalarRange = vcs.minmax(self._data1)
 
         # Create/update the VTK dataset.
-        self._updateVTKDataSet()
+        plotBasedDualGrid = kargs.get('plot_based_dual_grid', True)
+        self._updateVTKDataSet(plotBasedDualGrid)
 
         # Update the results:
         self._resultDict["vtk_backend_grid"] = self._vtkDataSet
@@ -308,19 +317,65 @@ class Pipeline2D(IPipeline2D):
         """Overrides baseclass implementation."""
         self._data1 = self._context().trimData2D(self._originalData1)
         self._data2 = self._context().trimData2D(self._originalData2)
-        self._min = self._data1.min()
-        self._max = self._data1.max()
 
-    def _updateVTKDataSet(self):
-        """Overrides baseclass implementation."""
+    def _updateVTKDataSet(self, plotBasedDualGrid):
+        """
+        """
+        if (plotBasedDualGrid):
+            hasCellData = self._data1.hasCellData()
+            dualGrid = (hasCellData != self._needsCellData)
+        else:
+            dualGrid = False
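+        # Illustrative: boxfill needs cell data, so if the file stores point
+        # data we plot on the dual grid (cells centered on the original
+        # points); the converse holds for point-data plots such as isofill.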
         genGridDict = vcs2vtk.genGrid(self._data1, self._data2, self._gm,
                                       deep=False,
                                       grid=self._vtkDataSet,
-                                      geo=self._vtkGeoTransform)
-
+                                      geo=self._vtkGeoTransform, genVectors=self._needsVectors,
+                                      dualGrid=dualGrid)
         self._data1 = genGridDict["data"]
+        self._data2 = genGridDict["data2"]
         self._updateFromGenGridDict(genGridDict)
 
+    def _createPolyDataFilter(self):
+        """This is only used when we use the grid stored in the file for all plots."""
+        self._vtkPolyDataFilter = vtk.vtkDataSetSurfaceFilter()
+        if self._hasCellData == self._needsCellData:
+            self._vtkPolyDataFilter.SetInputData(self._vtkDataSet)
+        elif self._hasCellData:
+            # data is on cells but the plot needs point data
+            c2p = vtk.vtkCellDataToPointData()
+            c2p.PassCellDataOn()
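+            # vtkCellDataToPointData averages cell values onto the points;
+            # PassCellDataOn() keeps the original cell arrays as well.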
+            c2p.SetInputData(self._vtkDataSet)
+            self._vtkPolyDataFilter.SetInputConnection(c2p.GetOutputPort())
+        else:
+            # data is on points but the plot needs cell data
+            p2c = vtk.vtkPointDataToCellData()
+            p2c.SetInputData(self._vtkDataSet)
+            # For contouring, duplicate points seem to confuse the filter
+            self._vtkPolyDataFilter.SetInputConnection(p2c.GetOutputPort())
+        self._vtkPolyDataFilter.Update()
+        self._resultDict["vtk_backend_filter"] = self._vtkPolyDataFilter
+        # Create an actor and a renderer for the surface mesh.
+        # This is used for displaying point information using hardware selection.
+        mapper = vtk.vtkPolyDataMapper()
+        mapper.SetInputConnection(self._vtkPolyDataFilter.GetOutputPort())
+        act = vtk.vtkActor()
+        act.SetMapper(mapper)
+        vp = self._resultDict.get(
+            'ratio_autot_viewport',
+            [self._template.data.x1, self._template.data.x2,
+             self._template.data.y1, self._template.data.y2])
+        plotting_dataset_bounds = self.getPlottingBounds()
+        surface_renderer, xScale, yScale = self._context().fitToViewport(
+            act, vp,
+            wc=plotting_dataset_bounds, geoBounds=self._vtkDataSet.GetBounds(),
+            geo=self._vtkGeoTransform,
+            priority=self._template.data.priority,
+            create_renderer=True)
+        self._resultDict['surface_renderer'] = surface_renderer
+        self._resultDict['surface_scale'] = (xScale, yScale)
+        if (surface_renderer):
+            surface_renderer.SetDraw(False)
+
     def _updateFromGenGridDict(self, genGridDict):
         """Overrides baseclass implementation."""
         self._vtkDataSet = genGridDict['vtk_backend_grid']
@@ -329,7 +384,7 @@ class Pipeline2D(IPipeline2D):
         self._useContinents = genGridDict['continents']
         self._dataWrapModulo = genGridDict['wrap']
         self._vtkGeoTransform = genGridDict['geo']
-        self._useCellScalars = genGridDict['cellData']
+        self._hasCellData = genGridDict['cellData']
 
     def _createMaskedDataMapper(self):
         """Overrides baseclass implementation."""
@@ -338,11 +393,11 @@ class Pipeline2D(IPipeline2D):
         if color is not None:
             color = self.getColorIndexOrRGBA(_colorMap, color)
         self._maskedDataMapper = vcs2vtk.putMaskOnVTKGrid(
-            self._data1, self._vtkDataSet, color, self._useCellScalars,
+            self._data1, self._vtkDataSet, color, self._hasCellData,
             deep=False)
 
         self._resultDict["vtk_backend_missing_mapper"] = (
-            self._maskedDataMapper, color, self._useCellScalars)
+            self._maskedDataMapper, color, self._hasCellData)
 
     def getPlottingBounds(self):
         """gm.datawc if it is set or dataset_bounds if there is not geographic projection
diff --git a/Packages/vcs/vcs/vcsvtk/vectorpipeline.py b/Packages/vcs/vcs/vcsvtk/vectorpipeline.py
index b8c4eaead4e706c4c38146a67ebcb359deeab0c5..8c09596ed3e725bc35dffdd8e88c04eb740225f1 100644
--- a/Packages/vcs/vcs/vcsvtk/vectorpipeline.py
+++ b/Packages/vcs/vcs/vcsvtk/vectorpipeline.py
@@ -1,72 +1,54 @@
-from .pipeline import Pipeline
+from .pipeline2d import Pipeline2D
 
 import vcs
 from vcs import vcs2vtk
 import vtk
 
 
-class VectorPipeline(Pipeline):
+class VectorPipeline(Pipeline2D):
 
     """Implementation of the Pipeline interface for VCS vector plots."""
 
     def __init__(self, gm, context_):
         super(VectorPipeline, self).__init__(gm, context_)
+        self._needsCellData = False
+        self._needsVectors = True
 
-    def plot(self, data1, data2, tmpl, grid, transform, **kargs):
+    def _plotInternal(self):
         """Overrides baseclass implementation."""
         # Preserve time and z axis for plotting this info in renderTemplate
-        geo = None  # to make flake8 happy
         projection = vcs.elements["projection"][self._gm.projection]
-        returned = {}
-        taxis = data1.getTime()
-        if data1.ndim > 2:
-            zaxis = data1.getAxis(-3)
+        taxis = self._originalData1.getTime()
+        scaleFactor = 1.0
+
+        if self._originalData1.ndim > 2:
+            zaxis = self._originalData1.getAxis(-3)
         else:
             zaxis = None
 
-        # Ok get3 only the last 2 dims
-        data1 = self._context().trimData2D(data1)
-        data2 = self._context().trimData2D(data2)
-
         scale = 1.0
         lat = None
         lon = None
 
-        latAccessor = data1.getLatitude()
-        lonAccesrsor = data1.getLongitude()
+        latAccessor = self._data1.getLatitude()
+        lonAccessor = self._data1.getLongitude()
         if latAccessor:
             lat = latAccessor[:]
-        if lonAccesrsor:
-            lon = lonAccesrsor[:]
-
-        gridGenDict = vcs2vtk.genGridOnPoints(data1, self._gm, deep=False, grid=grid,
-                                              geo=transform, data2=data2)
-
-        data1 = gridGenDict["data"]
-        data2 = gridGenDict["data2"]
-        geo = gridGenDict["geo"]
-
-        grid = gridGenDict['vtk_backend_grid']
-        xm = gridGenDict['xm']
-        xM = gridGenDict['xM']
-        ym = gridGenDict['ym']
-        yM = gridGenDict['yM']
-        continents = gridGenDict['continents']
-        self._dataWrapModulo = gridGenDict['wrap']
-        geo = gridGenDict['geo']
-
-        if geo is not None:
+        if lonAccessor:
+            lon = lonAccessor[:]
+
+        if self._vtkGeoTransform is not None:
             newv = vtk.vtkDoubleArray()
             newv.SetNumberOfComponents(3)
-            newv.InsertTupleValue(0, [lon.min(), lat.min(), 0])
-            newv.InsertTupleValue(1, [lon.max(), lat.max(), 0])
+            newv.InsertTypedTuple(0, [lon.min(), lat.min(), 0])
+            newv.InsertTypedTuple(1, [lon.max(), lat.max(), 0])
 
-            vcs2vtk.projectArray(newv, projection, [xm, xM, ym, yM])
+            vcs2vtk.projectArray(newv, projection, self._vtkDataSetBounds)
             dimMin = [0, 0, 0]
             dimMax = [0, 0, 0]
 
-            newv.GetTupleValue(0, dimMin)
-            newv.GetTupleValue(1, dimMax)
+            newv.GetTypedTuple(0, dimMin)
+            newv.GetTypedTuple(1, dimMax)
 
             maxDimX = max(dimMin[0], dimMax[0])
             maxDimY = max(dimMin[1], dimMax[1])
@@ -81,19 +63,6 @@ class VectorPipeline(Pipeline):
         else:
             scale = 1.0
 
-        returned["vtk_backend_grid"] = grid
-        returned["vtk_backend_geo"] = geo
-        missingMapper = vcs2vtk.putMaskOnVTKGrid(data1, grid, None, False,
-                                                 deep=False)
-
-        # None/False are for color and cellData
-        # (sent to vcs2vtk.putMaskOnVTKGrid)
-        returned["vtk_backend_missing_mapper"] = (missingMapper, None, False)
-
-        w = vcs2vtk.generateVectorArray(data1, data2, grid)
-
-        grid.GetPointData().AddArray(w)
-
         # Vector attempt
         l = self._gm.line
         if l is None:
@@ -117,24 +86,80 @@ class VectorPipeline(Pipeline):
         arrow.SetOutputPointsPrecision(vtk.vtkAlgorithm.DOUBLE_PRECISION)
         arrow.FilledOff()
 
+        polydata = self._vtkPolyDataFilter.GetOutput()
+        vectors = polydata.GetPointData().GetVectors()
+
+        if self._gm.scaletype == 'constant' or\
+           self._gm.scaletype == 'constantNNormalize' or\
+           self._gm.scaletype == 'constantNLinear':
+            scaleFactor = scale * 2.0 * self._gm.scale
+        else:
+            scaleFactor = 1.0
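+        # The 'constant*' scale types start from a fixed factor; for
+        # 'constantNNormalize' and 'constantNLinear' it is refined by the
+        # normalize/linear logic below.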
+
         glyphFilter = vtk.vtkGlyph2D()
-        glyphFilter.SetInputData(grid)
-        glyphFilter.SetInputArrayToProcess(1, 0, 0, 0, "vectors")
+        glyphFilter.SetInputData(polydata)
+        glyphFilter.SetInputArrayToProcess(1, 0, 0, 0, "vector")
         glyphFilter.SetSourceConnection(arrow.GetOutputPort())
         glyphFilter.SetVectorModeToUseVector()
 
         # Rotate arrows to match vector data:
         glyphFilter.OrientOn()
+        glyphFilter.ScalingOn()
 
-        # Scale to vector magnitude:
         glyphFilter.SetScaleModeToScaleByVector()
-        glyphFilter.SetScaleFactor(scale * 2.0 * self._gm.scale)
 
-        # These are some unfortunately named methods. It does *not* clamp the
-        # scale range to [min, max], but rather remaps the range
-        # [min, max] --> [0, 1].
-        glyphFilter.ClampingOn()
-        glyphFilter.SetRange(0.01, 1.0)
+        if self._gm.scaletype == 'normalize' or self._gm.scaletype == 'linear' or\
+           self._gm.scaletype == 'constantNNormalize' or self._gm.scaletype == 'constantNLinear':
+
+            # Find the max vector magnitude
+            maxNorm = vectors.GetMaxNorm()
+
+            if maxNorm == 0:
+                maxNorm = 1.0
+
+            if self._gm.scaletype == 'normalize' or self._gm.scaletype == 'constantNNormalize':
+                scaleFactor /= maxNorm
+
+            if self._gm.scaletype == 'linear' or self._gm.scaletype == 'constantNLinear':
+                minNorm = None
+                maxNorm = None
+
+                noOfComponents = vectors.GetNumberOfComponents()
+                for i in range(0, vectors.GetNumberOfTuples()):
+                    norm = vtk.vtkMath.Norm(vectors.GetTuple(i), noOfComponents)
+
+                    if (minNorm is None or norm < minNorm):
+                        minNorm = norm
+                    if (maxNorm is None or norm > maxNorm):
+                        maxNorm = norm
+
+                if maxNorm == 0:
+                    maxNorm = 1.0
+
+                scalarArray = vtk.vtkDoubleArray()
+                scalarArray.SetNumberOfComponents(1)
+                scalarArray.SetNumberOfValues(vectors.GetNumberOfTuples())
+
+                oldRange = maxNorm - minNorm
+                oldRange = 1.0 if oldRange == 0.0 else oldRange
+
+                # New range min, max.
+                newRangeValues = self._gm.scalerange
+                newRange = newRangeValues[1] - newRangeValues[0]
+
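+                # Illustrative: norms in [2.0, 10.0] with scalerange (0.1, 1.0)
+                # remap linearly so 2.0 -> 0.1 and 10.0 -> 1.0.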
+                for i in range(0, vectors.GetNumberOfTuples()):
+                    norm = vtk.vtkMath.Norm(vectors.GetTuple(i), noOfComponents)
+                    newValue = (((norm - minNorm) * newRange) / oldRange) + newRangeValues[0]
+                    scalarArray.SetValue(i, newValue)
+                # set the remapped scalars once, after the loop
+                polydata.GetPointData().SetScalars(scalarArray)
+
+                # Scale to vector magnitude:
+                # NOTE: Currently we compute our own scaling factor since VTK
+                # clamps values > max to max and values < min to min instead
+                # of remapping the range.
+                glyphFilter.SetScaleModeToScaleByScalar()
+
+        glyphFilter.SetScaleFactor(scaleFactor)
 
         mapper = vtk.vtkPolyDataMapper()
 
@@ -152,22 +177,20 @@ class VectorPipeline(Pipeline):
 
         plotting_dataset_bounds = vcs2vtk.getPlottingBounds(
             vcs.utils.getworldcoordinates(self._gm,
-                                          data1.getAxis(-1),
-                                          data1.getAxis(-2)),
-            [xm, xM, ym, yM], geo)
+                                          self._data1.getAxis(-1),
+                                          self._data1.getAxis(-2)),
+            self._vtkDataSetBounds, self._vtkGeoTransform)
         x1, x2, y1, y2 = plotting_dataset_bounds
-        if geo is None:
+        if self._vtkGeoTransform is None:
             wc = plotting_dataset_bounds
         else:
             xrange = list(act.GetXRange())
             yrange = list(act.GetYRange())
             wc = [xrange[0], xrange[1], yrange[0], yrange[1]]
 
-        if (transform and kargs.get('ratio', '0') == 'autot'):
-            returned['ratio_autot_viewport'] = self._processRatioAutot(tmpl, grid)
-
-        vp = returned.get('ratio_autot_viewport',
-                          [tmpl.data.x1, tmpl.data.x2, tmpl.data.y1, tmpl.data.y2])
+        vp = self._resultDict.get('ratio_autot_viewport',
+                                  [self._template.data.x1, self._template.data.x2,
+                                   self._template.data.y1, self._template.data.y2])
         # look for previous dataset_bounds different from ours and
         # modify the viewport so that the datasets are aligned
         # Hack to fix the case when the user does not specify gm.datawc_...
@@ -189,31 +212,30 @@ class VectorPipeline(Pipeline):
         dataset_renderer, xScale, yScale = self._context().fitToViewport(
             act, vp,
             wc=wc,
-            priority=tmpl.data.priority,
+            priority=self._template.data.priority,
             create_renderer=True)
-        returned['dataset_renderer'] = dataset_renderer
-        returned['dataset_scale'] = (xScale, yScale)
-        bounds = [min(xm, xM), max(xm, xM), min(ym, yM), max(ym, yM)]
-        kwargs = {'vtk_backend_grid': grid,
-                  'dataset_bounds': bounds,
-                  'plotting_dataset_bounds': plotting_dataset_bounds}
-        if ('ratio_autot_viewport' in returned):
+        kwargs = {'vtk_backend_grid': self._vtkDataSet,
+                  'dataset_bounds': self._vtkDataSetBounds,
+                  'plotting_dataset_bounds': plotting_dataset_bounds,
+                  'vtk_backend_geo': self._vtkGeoTransform}
+        if ('ratio_autot_viewport' in self._resultDict):
             kwargs["ratio_autot_viewport"] = vp
-        returned.update(self._context().renderTemplate(
-            tmpl, data1,
+        self._resultDict.update(self._context().renderTemplate(
+            self._template, self._data1,
             self._gm, taxis, zaxis, **kwargs))
 
         if self._context().canvas._continents is None:
-            continents = False
-        if continents:
+            self._useContinents = False
+        if self._useContinents:
             continents_renderer, xScale, yScale = self._context().plotContinents(
                 plotting_dataset_bounds, projection,
-                self._dataWrapModulo, vp, tmpl.data.priority,
-                vtk_backend_grid=grid,
-                dataset_bounds=bounds)
-            returned["continents_renderer"] = continents_renderer
-        returned["vtk_backend_actors"] = [[act, plotting_dataset_bounds]]
-        returned["vtk_backend_glyphfilters"] = [glyphFilter]
-        returned["vtk_backend_luts"] = [[None, None]]
-
-        return returned
+                self._dataWrapModulo, vp, self._template.data.priority,
+                vtk_backend_grid=self._vtkDataSet,
+                dataset_bounds=self._vtkDataSetBounds)
+        self._resultDict["vtk_backend_actors"] = [[act, plotting_dataset_bounds]]
+        self._resultDict["vtk_backend_glyphfilters"] = [glyphFilter]
+        self._resultDict["vtk_backend_luts"] = [[None, None]]
+
+    def _updateContourLevelsAndColors(self):
+        """Overrides baseclass implementation."""
+        pass
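+
+# A minimal standalone sketch of the remap applied above for the 'linear' and
+# 'constantNLinear' scale types: vector norms are mapped from
+# [minNorm, maxNorm] onto the graphics method's scalerange (names below are
+# illustrative only):
+#
+#     def remap_norm(norm, min_norm, max_norm, new_min, new_max):
+#         old_range = (max_norm - min_norm) or 1.0  # guard a zero-width range
+#         return (norm - min_norm) * (new_max - new_min) / old_range + new_min
+#
+#     remap_norm(5.0, 0.0, 10.0, 0.1, 1.0)  # -> 0.55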
diff --git a/Packages/vcs/vcs/vector.py b/Packages/vcs/vcs/vector.py
index acea94c047c2ecc223dbe8b2df17f8c13c09cc54..2c49bfd30d2bda86271c9bf267e9a26176605f58 100755
--- a/Packages/vcs/vcs/vector.py
+++ b/Packages/vcs/vcs/vector.py
@@ -131,7 +131,7 @@ def process_src(nm, code):
 class Gv(object):
 
     """
- Class:	Gv				# Vector
+ Class: Gv              # Vector
 
  Description of Gv Class:
     The vector graphics method displays a vector plot of a 2D vector field. Vectors
@@ -145,76 +145,76 @@ class Gv(object):
     entry.
 
  Other Useful Functions:
-         a=vcs.init()			# Constructor
-         a.show('vector')		# Show predefined vector graphics methods
-         a.show('line')			# Show predefined VCS line objects
-         a.setcolormap("AMIP")		# Change the VCS color Map
-         a.vector(s1, s2, v,'default')	# Plot data 's1', and 's2' with vector 'v'
+         a=vcs.init()           # Constructor
+         a.show('vector')       # Show predefined vector graphics methods
+         a.show('line')         # Show predefined VCS line objects
+         a.setcolormap("AMIP")      # Change the VCS color Map
+         a.vector(s1, s2, v,'default')  # Plot data 's1', and 's2' with vector 'v'
                                          and 'default' template
-         a.update()		 	# Updates the VCS Canvas at user's request
-         a.mode=1, or 0 	 	# If 1, then automatic update, else if
+         a.update()         # Updates the VCS Canvas at user's request
+         a.mode=1, or 0         # If 1, then automatic update, else if
                                           0, then use update function to
                                           update the VCS Canvas.
 
  Example of Use:
     a=vcs.init()
     To Create a new instance of vector use:
-     vc=a.createvector('new','quick')	# Copies content of 'quick' to 'new'
-     vc=a.createvector('new') 		# Copies content of 'default' to 'new'
+     vc=a.createvector('new','quick')   # Copies content of 'quick' to 'new'
+     vc=a.createvector('new')       # Copies content of 'default' to 'new'
 
     To Modify an existing vector use:
      vc=a.getvector('AMIP_psl')
 
-    vc.list()  				# Will list all the vector attribute values
-    vc.projection='linear'   		# Can only be 'linear'
+    vc.list()               # Will list all the vector attribute values
+    vc.projection='linear'          # Can only be 'linear'
     lon30={-180:'180W',-150:'150W',0:'Eq'}
     vc.xticlabels1=lon30
     vc.xticlabels2=lon30
-    vc.xticlabels(lon30, lon30)  	# Will set them both
+    vc.xticlabels(lon30, lon30)     # Will set them both
     vc.xmtics1=''
     vc.xmtics2=''
-    vc.xmtics(lon30, lon30)  		# Will set them both
+    vc.xmtics(lon30, lon30)         # Will set them both
     vc.yticlabels1=lat10
     vc.yticlabels2=lat10
-    vc.yticlabels(lat10, lat10)  	# Will set them both
+    vc.yticlabels(lat10, lat10)     # Will set them both
     vc.ymtics1=''
     vc.ymtics2=''
-    vc.ymtics(lat10, lat10)  		# Will set them both
+    vc.ymtics(lat10, lat10)         # Will set them both
     vc.datawc_y1=-90.0
     vc.datawc_y2=90.0
     vc.datawc_x1=-180.0
     vc.datawc_x2=180.0
-    vc.datawc(-90, 90, -180, 180)  	# Will set them all
+    vc.datawc(-90, 90, -180, 180)   # Will set them all
     xaxisconvert='linear'
     yaxisconvert='linear'
-    vc.xyscale('linear', 'area_wt')  	# Will set them both
+    vc.xyscale('linear', 'area_wt')     # Will set them both
 
     Specify the line style:
-     vc.line=0 				# Same as vc.line='solid'
-     vc.line=1 				# Same as vc.line='dash'
-     vc.line=2 				# Same as vc.line='dot'
-     vc.line=3 				# Same as vc.line='dash-dot'
-     vc.line=4 				# Same as vc.line='long-dot'
+     vc.line=0              # Same as vc.line='solid'
+     vc.line=1              # Same as vc.line='dash'
+     vc.line=2              # Same as vc.line='dot'
+     vc.line=3              # Same as vc.line='dash-dot'
+     vc.line=4              # Same as vc.line='long-dot'
 
     Specify the line color of the vectors:
-     vc.linecolor=16   			# Color range: 16 to 230, default line color is black
-     vc.linewidth=1   			# Width range: 1 to 100, default size is 1
+     vc.linecolor=16            # Color range: 16 to 230, default line color is black
+     vc.linewidth=1             # Width range: 1 to 100, default size is 1
 
     Specify the vector scale factor:
-     vc.scale=2.0   			# Can be an integer or float
+     vc.scale=2.0               # Can be an integer or float
 
     Specify the vector alignment:
-     vc.alignment=0			# Same as vc.alignment='head'
-     vc.alignment=1			# Same as vc.alignment='center'
-     vc.alignment=2			# Same as vc.alignment='tail'
+     vc.alignment=0         # Same as vc.alignment='head'
+     vc.alignment=1         # Same as vc.alignment='center'
+     vc.alignment=2         # Same as vc.alignment='tail'
 
     Specify the vector type:
-      vc.type=0   			# Same as vc.type='arrow head'
-      vc.type=1   			# Same as vc.type='wind barbs'
-      vc.type=2   			# Same as vc.type='solid arrow head'
+      vc.type=0             # Same as vc.type='arrow head'
+      vc.type=1             # Same as vc.type='wind barbs'
+      vc.type=2             # Same as vc.type='solid arrow head'
 
     Specify the vector reference:
-      vc.reference=4    		# Can be an integer or float
+      vc.reference=4            # Can be an integer or float
 """
     __slots__ = [
         'name',
@@ -244,6 +244,9 @@ class Gv(object):
         'type',
         'reference',
         'colormap',
+        'scaleoptions',
+        'scaletype',
+        'scalerange',
         '_name',
         '_xaxisconvert',
         '_yaxisconvert',
@@ -270,9 +273,13 @@ class Gv(object):
         '_type',
         '_reference',
         '_colormap',
+        '_scaleoptions',
+        '_scaletype',
+        '_scalerange',
     ]
 
     colormap = VCS_validation_functions.colormap
+    scaleoptions = ('off', 'constant', 'normalize', 'linear', 'constantNNormalize', 'constantNLinear')
 
     def _getname(self):
         return self._name
@@ -528,6 +535,30 @@ class Gv(object):
         self._alignment = value
     alignment = property(_getalignment, _setalignment)
 
+    def _getscaletype(self):
+        return self._scaletype
+
+    def _setscaletype(self, value):
+        value = VCS_validation_functions.checkInStringList(self,
+                                                           'scaletype',
+                                                           value,
+                                                           self.scaleoptions)
+        self._scaletype = value
+    scaletype = property(_getscaletype, _setscaletype)
+
+    def _getscalerange(self):
+        return self._scalerange
+
+    def _setscalerange(self, value):
+        value = VCS_validation_functions.checkListOfNumbers(self,
+                                                            'scalerange',
+                                                            value,
+                                                            minvalue=0.0,
+                                                            minelements=2,
+                                                            maxelements=2)
+        self._scalerange = value
+    scalerange = property(_getscalerange, _setscalerange)
+
     def __init__(self, Gv_name, Gv_name_src='default'):
                 #                                                         #
                 ###########################################################
@@ -568,6 +599,8 @@ class Gv(object):
             self._datawc_timeunits = "days since 2000"
             self._datawc_calendar = 135441
             self._colormap = None
+            self._scaletype = self.scaleoptions[4]
+            self._scalerange = [0.1, 1.0]
         else:
             if isinstance(Gv_name_src, Gv):
                 Gv_name_src = Gv_name_src.name
@@ -583,7 +616,9 @@ class Gv(object):
                         'datawc_x2', 'xaxisconvert', 'yaxisconvert',
                         'line', 'linecolor', 'linewidth',
                         'datawc_timeunits', 'datawc_calendar', 'colormap',
-                        'scale', 'alignment', 'type', 'reference']:
+                        'scale', 'alignment', 'type', 'reference', 'scaletype',
+                        'scalerange']:
                 setattr(self, att, getattr(src, att))
         # Ok now we need to stick in the elements
         vcs.elements["vector"][Gv_name] = self
@@ -660,6 +695,8 @@ class Gv(object):
         print "alignment = ", self.alignment
         print "type = ", self.type
         print "reference = ", self.reference
+        print "scaletype = ", self.scaletype
+        print "scalerange = ", self.scalerange
 
     ##########################################################################
     #                                                                           #
@@ -798,6 +835,9 @@ class Gv(object):
             fp.write("%s.linecolor = %s\n" % (unique_name, self.linecolor))
             fp.write("%s.linewidth = %s\n" % (unique_name, self.linewidth))
             fp.write("%s.scale = %s\n" % (unique_name, self.scale))
+            fp.write("%s.scaletype = %s\n" % (unique_name, self.scaletype))
+            fp.write("%s.scalerange = %s\n" % (unique_name, self.scalerange))
+            fp.write("%s.scaleoptions = %s\n" % (unique_name, self.scaleoptions))
             fp.write("%s.alignment = '%s'\n" % (unique_name, self.alignment))
             fp.write("%s.type = '%s'\n" % (unique_name, self.type))
             fp.write("%s.reference = %s\n\n" % (unique_name, self.reference))
@@ -814,5 +854,5 @@ class Gv(object):
 
 
 ###############################################################################
-#        END OF FILE							      #
+#        END OF FILE                                  #
 ###############################################################################
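+# A short usage sketch of the new attributes (assuming the canvas API shown
+# in the docstring above):
+#
+#     import vcs
+#     canvas = vcs.init()
+#     vec = canvas.createvector('scaled')
+#     vec.scaletype = 'constantNLinear'  # must be one of vec.scaleoptions
+#     vec.scalerange = [0.1, 1.0]        # exactly two numbers >= 0
+#     vec.scale = 2.0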
diff --git a/Packages/vcs/vcs/xmldocs.py b/Packages/vcs/vcs/xmldocs.py
index d1754e0f303bb0edbf4a1d5c1c984792c2f68865..5993178ace629556aa0f8919c11f098d09fc48dc 100644
--- a/Packages/vcs/vcs/xmldocs.py
+++ b/Packages/vcs/vcs/xmldocs.py
@@ -1,71 +1,128 @@
 plot_keywords_doc = """
-xaxis :: (cdms2.axis.TransientAxis) () Axis object to replace the slab -1 dim axis
-yaxis :: (cdms2.axis.TransientAxis) () Axis object to replace the slab -2 dim axis, only if slab has more than 1D
-zaxis :: (cdms2.axis.TransientAxis) () Axis object to replace the slab -3 dim axis, only if slab has more than 2D
-taxis :: (cdms2.axis.TransientAxis) () Axis object to replace the slab -4 dim axis, only if slab has more than 3D
-waxis :: (cdms2.axis.TransientAxis) () Axis object to replace the slab -5 dim axis, only if slab has more than 4D
-xrev :: (bool) () reverse x axis
-yrev :: (bool) () reverse y axis, only if slab has more than 1D
-xarray :: (cdms2.tvariable.TransientVariable/numpy.core.ma.MaskedArray/numpy.ndarray/list) () Values to use instead of x axis
-yarray :: (cdms2.tvariable.TransientVariable/numpy.core.ma.MaskedArray/numpy.ndarray/list) () Values to use instead of y axis, only if var has more than 1D
-zarray :: (cdms2.tvariable.TransientVariable/numpy.core.ma.MaskedArray/numpy.ndarray/list) () Values to use instead of z axis, only if var has more than 2D
-tarray :: (cdms2.tvariable.TransientVariable/numpy.core.ma.MaskedArray/numpy.ndarray/list) () Values to use instead of t axis, only if var has more than 3D
-warray :: (cdms2.tvariable.TransientVariable/numpy.core.ma.MaskedArray/numpy.ndarray/list) () Values to use instead of w axis, only if var has more than 4D
-continents :: (int) () continents type number
-name :: (str) () replaces variable name on plot
-time "" (cdtime.comptime/cdtime.reltime/cdtime.abstime) () replaces time name on plot
-units :: (str) () replaces units value on plot
-ymd :: (str) () replaces year/month/day on plot
-hms :: (str) () replaces hh/mm/ss on plot
-file_comment :: (str) () replaces file_comment on plot
-xbounds :: (cdms2.tvariable.TransientVariable/numpy.core.ma.MaskedArray/numpy.ndarray/list) () Values to use instead of x axis bounds values
-ybounds :: (cdms2.tvariable.TransientVariable/numpy.core.ma.MaskedArray/numpy.ndarray/list) () Values to use instead of y axis bounds values (if exist)
-xname :: (str) () replace xaxis name on plot
-yname :: (str) () replace yaxis name on plot (if exists)
-zname :: (str) () replace zaxis name on plot (if exists)
-tname :: (str) () replace taxis name on plot (if exists)
-wname :: (str) () replace waxis name on plot (if exists)
-xunits :: (str) () replace xaxis units on plot
-yunits :: (str) () replace yaxis units on plot (if exists)
-zunits :: (str) () replace zaxis units on plot (if exists)
-tunits :: (str) () replace taxis units on plot (if exists)
-wunits :: (str) () replace waxis units on plot (if exists)
-xweights :: (cdms2.tvariable.TransientVariable/numpy.core.ma.MaskedArray/numpy.ndarray/list) () replace xaxis weights used for computing mean
-yweights :: (cdms2.tvariable.TransientVariable/numpy.core.ma.MaskedArray/numpy.ndarray/list) () replace xaxis weights used for computing mean
-comment1 :: (str) () replaces comment1 on plot
-comment2 :: (str) () replaces comment2 on plot
-comment3 :: (str) () replaces comment3 on plot
-comment4 :: (str) () replaces comment4 on plot
-long_name :: (str) () replaces long_name on plot
-grid :: (cdms2.grid.TransientRectGrid) () replaces array grid (if exists)
-bg :: (bool/int) () plots in background mode
-ratio :: (int/str) () sets the y/x ratio ,if passed as a string with 't' at the end, will aslo moves the ticks
+:param xaxis: Axis object to replace the slab -1 dim axis
+:param yaxis: Axis object to replace the slab -2 dim axis, only if slab has more than 1D
+:param zaxis: Axis object to replace the slab -3 dim axis, only if slab has more than 2D
+:param taxis: Axis object to replace the slab -4 dim axis, only if slab has more than 3D
+:param waxis: Axis object to replace the slab -5 dim axis, only if slab has more than 4D
+:param xrev: reverse x axis
+:param yrev: reverse y axis, only if slab has more than 1D
+:param xarray: Values to use instead of x axis
+:param yarray: Values to use instead of y axis, only if var has more than 1D
+:param zarray: Values to use instead of z axis, only if var has more than 2D
+:param tarray: Values to use instead of t axis, only if var has more than 3D
+:param warray: Values to use instead of w axis, only if var has more than 4D
+:param continents: continents type number
+:param name: replaces variable name on plot
+:param time: replaces time name on plot
+:param units: replaces units value on plot
+:param ymd: replaces year/month/day on plot
+:param hms: replaces hh/mm/ss on plot
+:param file_comment: replaces file_comment on plot
+:param xbounds: Values to use instead of x axis bounds values
+:param ybounds: Values to use instead of y axis bounds values (if exist)
+:param xname: replace xaxis name on plot
+:param yname: replace yaxis name on plot (if exists)
+:param zname: replace zaxis name on plot (if exists)
+:param tname: replace taxis name on plot (if exists)
+:param wname: replace waxis name on plot (if exists)
+:param xunits: replace xaxis units on plot
+:param yunits: replace yaxis units on plot (if exists)
+:param zunits: replace zaxis units on plot (if exists)
+:param tunits: replace taxis units on plot (if exists)
+:param wunits: replace waxis units on plot (if exists)
+:param xweights: replace xaxis weights used for computing mean
+:param yweights: replace xaxis weights used for computing mean
+:param comment1: replaces comment1 on plot
+:param comment2: replaces comment2 on plot
+:param comment3: replaces comment3 on plot
+:param comment4: replaces comment4 on plot
+:param long_name: replaces long_name on plot
+:param grid: replaces array grid (if exists)
+:param bg: plots in background mode
+:param ratio: sets the y/x ratio; if passed as a string ending in 't', it will also move the ticks
+:type xaxis: cdms2.axis.TransientAxis
+:type yaxis: cdms2.axis.TransientAxis
+:type zaxis: cdms2.axis.TransientAxis
+:type taxis: cdms2.axis.TransientAxis
+:type waxis: cdms2.axis.TransientAxis
+:type xrev: bool
+:type yrev: bool
+:type xarray: cdms2.tvariable.TransientVariable/numpy.core.ma.MaskedArray/numpy.ndarray/list
+:type yarray: cdms2.tvariable.TransientVariable/numpy.core.ma.MaskedArray/numpy.ndarray/list
+:type zarray: cdms2.tvariable.TransientVariable/numpy.core.ma.MaskedArray/numpy.ndarray/list
+:type tarray: cdms2.tvariable.TransientVariable/numpy.core.ma.MaskedArray/numpy.ndarray/list
+:type warray: cdms2.tvariable.TransientVariable/numpy.core.ma.MaskedArray/numpy.ndarray/list
+:type continents: int
+:type name: str
+:type time: cdtime.comptime/cdtime.reltime/cdtime.abstime
+:type units: str
+:type ymd: str
+:type hms: str
+:type file_comment: str
+:type xbounds: cdms2.tvariable.TransientVariable/numpy.core.ma.MaskedArray/numpy.ndarray/list
+:type ybounds: cdms2.tvariable.TransientVariable/numpy.core.ma.MaskedArray/numpy.ndarray/list
+:type xname: str
+:type yname: str
+:type zname: str
+:type tname: str
+:type wname: str
+:type xunits: str
+:type yunits: str
+:type zunits: str
+:type tunits: str
+:type wunits: str
+:type xweights: cdms2.tvariable.TransientVariable/numpy.core.ma.MaskedArray/numpy.ndarray/list
+:type yweights: cdms2.tvariable.TransientVariable/numpy.core.ma.MaskedArray/numpy.ndarray/list
+:type comment1: str
+:type comment2: str
+:type comment3: str
+:type comment4: str
+:type long_name: str
+:type grid: cdms2.grid.TransientRectGrid
+:type bg: bool/int
+:type ratio: int/str
 """  # noqa
 
 data_time = """
-    datawc_timeunits :: (str) ('days since 2000') units to use when disaplaying time dimension auto tick
-    datawc_calendar:: (int) (135441) calendar to use when displaying time dimension auto tick, default is proleptic gregorian calendar
+:param datawc_timeunits: (Ex: 'days since 2000') units to use when displaying time dimension auto tick
+:type datawc_timeunits: str
+:param datawc_calendar: (Ex: 135441) calendar to use when displaying time dimension auto tick, default is the proleptic Gregorian calendar
+:type datawc_calendar: int
 """  # noqa
 graphics_method_core_notime = """
-    xmtics1 :: (str/{float:str}) ('') dictionary with location of intermediate tics as keys for 1st side of y axis
-    xmtics2 :: (str/{float:str}) ('') dictionary with location of intermediate tics as keys for 2nd side of y axis
-    ymtics1 :: (str/{float:str}) ('') dictionary with location of intermediate tics as keys for 1st side of y axis
-    ymtics2 :: (str/{float:str}) ('') dictionary with location of intermediate tics as keys for 2nd side of y axis
-    xticlabels1 :: (str/{float:str}) ('*') values for labels on 1st side of x axis
-    xticlabels2 :: (str/{float:str}) ('*') values for labels on 2nd side of x axis
-    yticlabels1 :: (str/{float:str}) ('*') values for labels on 1st side of y axis
-    yticlabels2 :: (str/{float:str}) ('*') values for labels on 2nd side of y axis
-    projection :: (str/vcs.projection.Proj) ('default') projection to use, name or object
-    datawc_x1 :: (float) (1.E20) first value of xaxis on plot
-    datawc_x2 :: (float) (1.E20) second value of xaxis on plot
-    datawc_y1 :: (float) (1.E20) first value of yaxis on plot
-    datawc_y2 :: (float) (1.E20) second value of yaxis on plot
+:param xmtics1: (Ex: '') dictionary with location of intermediate tics as keys for 1st side of y axis
+:type xmtics1: str/{float:str}
+:param xmtics2: (Ex: '') dictionary with location of intermediate tics as keys for 2nd side of y axis
+:type xmtics2: str/{float:str}
+:param ymtics1: (Ex: '') dictionary with location of intermediate tics as keys for 1st side of y axis
+:type ymtics1: str/{float:str}
+:param ymtics2: (Ex: '') dictionary with location of intermediate tics as keys for 2nd side of y axis
+:type ymtics2: str/{float:str}
+:param xticlabels1: (Ex: '*') values for labels on 1st side of x axis
+:type xticlabels1: str/{float:str}
+:param xticlabels2: (Ex: '*') values for labels on 2nd side of x axis
+:type xticlabels2: str/{float:str}
+:param yticlabels1: (Ex: '*') values for labels on 1st side of y axis
+:type yticlabels1: str/{float:str}
+:param yticlabels2: (Ex: '*') values for labels on 2nd side of y axis
+:type yticlabels2: str/{float:str}
+:param projection: (Ex: 'default') projection to use, name or object
+:type projection: str/vcs.projection.Proj
+:param datawc_x1: (Ex: 1.E20) first value of xaxis on plot
+:type datawc_x1: float
+:param datawc_x2: (Ex: 1.E20) second value of xaxis on plot
+:type datawc_x2: float
+:param datawc_y1: (Ex: 1.E20) first value of yaxis on plot
+:type datawc_y1: float
+:param datawc_y2: (Ex: 1.E20) second value of yaxis on plot
+:type datawc_y2: float
 """  # noqa
 graphics_method_core = """%s
 %s""" % (graphics_method_core_notime, data_time)
-axisconvert = """    %saxisconvert :: (str) ('linear') converting %saxis linear/log/log10/ln/exp/area_wt\n """
-xaxisconvert = axisconvert % ("x", "x")
-yaxisconvert = axisconvert % ("y", "y")
+axisconvert = """:param {axis}axisconvert: (Ex: 'linear') converting {axis}axis linear/log/log10/ln/exp/area_wt
+:type {axis}axisconvert: str\n"""
+xaxisconvert = axisconvert.format(axis="x")
+yaxisconvert = axisconvert.format(axis="y")
 axesconvert = xaxisconvert + yaxisconvert
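+# For reference, xaxisconvert above renders to:
+#   :param xaxisconvert: (Ex: 'linear') converting xaxis linear/log/log10/ln/exp/area_wt
+#   :type xaxisconvert: str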
 colorsdoc = """Options:::
            color1 :: (int) (16) value for color_1
@@ -176,29 +233,38 @@ markerdoc = """
 #############################################################################
 
 create_GM_input = """
-    new_GM_name :: (str) (0) name of the new graphics method object. If no name is given, then one will be created for use.
-    source_GM_name :: (str) (1) copy the contents of the source object to the newly created one. If no name is given, then the 'default' graphics methond contents is copied over to the new object.
+:param new_GM_name: (Ex: 'my_awesome_gm') name of the new graphics method object. If no name is given, then one will be created for use.
+:type new_GM_name: str
+:param source_GM_name: (Ex: 'default') copy the contents of the source object to the newly created one. If no name is given, then the 'default' graphics method contents are copied over to the new object.
+:type source_GM_name: str
 """  # noqa
 
 get_GM_input = """
-    GM_name :: (str) (0) retrieve the graphics method object of the given name. If no name is given, then retrieve the 'default' graphics method.
+:param GM_name: (Ex: 'default') retrieve the graphics method object of the given name. If no name is given, then retrieve the 'default' graphics method.
+:type GM_name: str
 """  # noqa
 
 plot_1D_input = """
-       slab :: (cdms2.tvariable.TransientVariable/numpy.core.ma.MaskedArray/numpy.ndarray/list) (0) Data at least 1D, last dimension will be plotted
+:param slab: (Ex: [1, 2]) Data at least 1D, last dimension will be plotted
+:type slab: cdms2.tvariable.TransientVariable/numpy.core.ma.MaskedArray/numpy.ndarray/list
 """  # noqa
 
 plot_2D_input = """
-       slab :: (cdms2.tvariable.TransientVariable/numpy.core.ma.MaskedArray/numpy.ndarray/list) (0) Data at least 2D, last 2 dimensions will be plotted
+:param slab: (Ex: [[0, 1]]) Data at least 2D, last 2 dimensions will be plotted
+:type slab: cdms2.tvariable.TransientVariable/numpy.core.ma.MaskedArray/numpy.ndarray/list
 """  # noqa
 
 plot_2_1D_input = """
-       slab_or_primary_object :: (cdms2.tvariable.TransientVariable/numpy.core.ma.MaskedArray/numpy.ndarray/list/vcs.fillarea.Tf/vcs.line.Tl/vcs.marker.Tm/vcs.textcombined.Tc) (None) Data at least 1D, last dimension(s) will be plotted, or primary vcs object
+:param slab_or_primary_object: Data at least 1D, last dimension(s) will be plotted, or secondary vcs object
+:type slab_or_primary_object: cdms2.tvariable.TransientVariable/numpy.core.ma.MaskedArray/numpy.ndarray/list/vcs.fillarea.Tf/vcs.line.Tl/vcs.marker.Tm/vcs.textcombined.Tc
 """  # noqa
 plot_2_1D_options = """
-       slab2 :: (cdms2.tvariable.TransientVariable/numpy.core.ma.MaskedArray/numpy.ndarray/list) (None) Data at least 1D, last dimension(s) will be plotted
-       template :: (str/vcs.template.P) ('default') vcs template
-       gm :: (str/vcs.boxfill.Gfb/vcs.isofill.Gfi/vcs.isoline.Gi/vcs.meshfill.Gfm/vcs.vector.Gv/vcs.scatter.GSp/vcs.outline.Go/vcs.outline.Gfo/vcs.taylor.Gtd/vcs.xvsy.GXY/vcs.xyvsy.GXy/vcs.yxvsx.GYx/vcsaddons.core.VCSaddon/vcs.continents.Gcon) ('default') graphic method to use
+:param slab2: Data at least 1D, last dimension(s) will be plotted
+:type slab2: cdms2.tvariable.TransientVariable/numpy.core.ma.MaskedArray/numpy.ndarray/list
+:param template: (Ex: 'default') vcs template to use
+:type template: str/vcs.template.P
+:param gm: (Ex: 'default') graphic method to use
+:type gm: str/vcs.boxfill.Gfb/vcs.isofill.Gfi/vcs.isoline.Gi/vcs.meshfill.Gfm/vcs.vector.Gv/vcs.scatter.GSp/vcs.outline.Go/vcs.outline.Gfo/vcs.taylor.Gtd/vcs.unified1d.G1d/vcsaddons.core.VCSaddon
 """  # noqa
 #############################################################################
 #                                                                           #
@@ -206,41 +272,42 @@ plot_2_1D_options = """
 #                                                                           #
 #############################################################################
 plot_output = """
-       display ::  (vcs.displayplot.Dp) (0) no default
+:return: Display Plot object representing the plot.
+:rtype: vcs.displayplot.Dp
 """
 
 boxfill_output = """
-       boxfill ::  (vcs.boxfill.Gfb) (0) no default
+       boxfill :: (Ex: 0) no default
 """
 
 isofill_output = """
-       isofill ::  (vcs.isofill.Gfi) (0) no default
+       isofill :: (Ex: 0) no default
 """
 
 isoline_output = """
-       isoline ::  (vcs.isoline.Gi) (0) no default
+       isoline :: (Ex: 0) no default
 """
 
 yxvsx_output = """
-       yxvsx ::  (vcs.yxvsx.GYx) (0) no default
+       yxvsx :: (Ex: 0) no default
 """
 
 xyvsy_output = """
-       xyvsy ::  (vcs.xyvsy.GXy) (0) no default
+       xyvsy :: (Ex: 0) no default
 """
 
 xvsy_output = """
-       xvsy ::  (vcs.xvsy.GXY) (0) no default
+       xvsy :: (Ex: 0) no default
 """
 
 scatter_output = """
-       scatter ::  (vcs.scatter.GSp) (0) no default
+       scatter :: (Ex: 0) no default
 """
 
 outfill_output = """
-       outfill ::  (vcs.outfill.Gfo) (0) no default
+       outfill :: (Ex: 0) no default
 """
 
 outline_output = """
-       outline ::  (vcs.outline.Go) (0) no default
+       outline :: (Ex: 0) no default
 """
diff --git a/Packages/vcsaddons/Lib/__init__.py b/Packages/vcsaddons/Lib/__init__.py
index eadaa435c1b12d1fcf02bb6ef017d62a41b7aa13..7136d36aa3753287331423cff0197d98f79f33ac 100644
--- a/Packages/vcsaddons/Lib/__init__.py
+++ b/Packages/vcsaddons/Lib/__init__.py
@@ -1,12 +1,129 @@
 gms = {}
 import histograms
+import polar
 import EzTemplate
 import yxvsxfill
 import continents
+import vcs
+
 
 def createyxvsxfill(name=None,source='default',x=None,template=None):
     return yxvsxfill.Gyf(name,source=source,x=x,template=template)
+
+
 def createhistogram(name=None,source='default',x=None,template=None):
     return histograms.Ghg(name,source=source,x=x,template=template)
+
+
 def createusercontinents(name=None,source="default",x=None,template=None):
     return continents.Guc(name,source=source,x=x,template=template)
+
+
+def createpolar(name=None, source="default", x=None, template=None):
+    if "polar_oned" not in gms:
+        init_polar()
+    return polar.Gpo(name, source=source, x=x, template=template)
+
+
+def getpolar(name=None):
+    if "polar_oned" not in gms:
+        init_polar()
+    if name in gms["polar_oned"]:
+        return gms["polar_oned"][name]
+    raise KeyError("No Polar GM exists with name '%s'" % name)
+
+
+def init_polar():
+    # Create nice polar template
+    try:
+        t = vcs.createtemplate("polar_oned")
+        t.data.x1 = .2
+        t.data.x2 = .8
+        t.data.y1 = .2
+        t.data.y2 = .8
+
+        t.legend.x1 = .85
+        t.legend.x2 = 1
+        t.legend.y1 = .15
+        t.legend.y2 = .85
+
+        dash = vcs.createline()
+        dash.type = "dash"
+        dot = vcs.createline()
+        dot.type = "dot"
+        t.xtic1.line = dash
+        t.ytic1.line = dot
+
+        left_aligned = vcs.createtextorientation()
+        left_aligned.halign = "left"
+        left_aligned.valign = "half"
+        t.legend.textorientation = left_aligned
+    except vcs.vcsError:
+        # Template already exists
+        pass
+    # Create some nice default polar GMs
+    degree_polar = polar.Gpo("degrees", template="polar_oned")
+    degree_polar.datawc_x1 = 0
+    degree_polar.datawc_x2 = 360
+    degree_polar.xticlabels1 = {
+        i: str(i) for i in range(0, 360, 45)
+    }
+
+    clock_24 = polar.Gpo("diurnal", template="polar_oned")
+    clock_24.datawc_x1 = 0
+    clock_24.datawc_x2 = 24
+    clock_24.clockwise = True
+    # 6 AM on the right
+    clock_24.theta_offset = -6
+    clock_24.xticlabels1 = {
+        i: str(i) for i in range(0, 24, 3)
+    }
+
+    clock_24_meridiem = polar.Gpo("diurnal_12_hour", source="diurnal", template="polar_oned")
+    clock_24_meridiem.xticlabels1 = {
+        0: "12 AM",
+        3: "3 AM",
+        6: "6 AM",
+        9: "9 AM",
+        12: "12 PM",
+        15: "3 PM",
+        18: "6 PM",
+        21: "9 PM"
+    }
+
+    clock_12 = polar.Gpo("semidiurnal", source="diurnal", template="polar_oned")
+    clock_12.datawc_x2 = 12
+    clock_12.xticlabels1 = {
+        i: str(i) for i in range(3, 13, 3)
+    }
+    # 3 on the right
+    clock_12.theta_offset = -3
+
+    annual_cycle = polar.Gpo("annual_cycle", template="polar_oned")
+    annual_cycle.datawc_x1 = 1
+    annual_cycle.datawc_x2 = 13
+    annual_cycle.clockwise = True
+    annual_cycle.xticlabels1 = {
+        1: "Jan",
+        2: "Feb",
+        3: "Mar",
+        4: "Apr",
+        5: "May",
+        6: "Jun",
+        7: "Jul",
+        8: "Aug",
+        9: "Sep",
+        10: "Oct",
+        11: "Nov",
+        12: "Dec"
+    }
+    # Put December on the top
+    annual_cycle.theta_offset = -2
+
+    seasonal = polar.Gpo("seasonal", template="polar_oned")
+    seasonal.datawc_x1 = 0
+    seasonal.datawc_x2 = 4
+    seasonal.xticlabels1 = {0: "DJF", 1: "MAM", 2: "JJA", 3: "SON"}
+    seasonal.clockwise = True
+    # DJF on top
+    seasonal.theta_offset = -1
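+
+# Example sketch using the predefined sources registered by init_polar():
+#
+#     import vcsaddons
+#     gm = vcsaddons.createpolar("my_cycle", source="annual_cycle")
+#     same = vcsaddons.getpolar("my_cycle")  # raises KeyError for unknown names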
diff --git a/Packages/vcsaddons/Lib/core.py b/Packages/vcsaddons/Lib/core.py
index 38932c48699321e3d9873a3d9d890aa218f41c63..0a75da12573970c18a9faabd76595a3372b46e84 100644
--- a/Packages/vcsaddons/Lib/core.py
+++ b/Packages/vcsaddons/Lib/core.py
@@ -1,18 +1,18 @@
 import vcsaddons,vcs
 import numpy
 
-class VCSaddon:
+class VCSaddon(object):
     def __init__(self,name=None,source='default',x=None,template=None):
         self._saves={}
         self.g_nslabs=1
-        if not self.g_name in vcsaddons.gms.keys():
-            vcsaddons.gms[self.g_name]={}
+        if self.g_type not in vcsaddons.gms:
+            vcsaddons.gms[self.g_type] = {}
         if name is None:
             cont = True
             while cont:
                 num= numpy.random.randint(1000)
-                nm = 'Ghg_'+str(num)
-                if not nm in vcsaddons.gms[self.g_name].keys():
+                nm = self.g_type + '_'+str(num)
+                if nm not in vcsaddons.gms[self.g_type]:
                     name = nm
                     cont = False
 
@@ -20,7 +20,7 @@ class VCSaddon:
             self.x=vcs.init()
         else:
             self.x=x
-            
+
         if template is None:
             self.template = self.x.gettemplate()
         elif isinstance(template,str):
@@ -30,7 +30,7 @@ class VCSaddon:
         else:
             raise "Error did not know what to do with template: %s" % template
 
-        if name in vcsaddons.gms[self.g_name].keys():
+        if name in vcsaddons.gms[self.g_type].keys():
             raise "Error graphic method %s already exists" % name
 
         if source=='default':
@@ -38,26 +38,33 @@ class VCSaddon:
             self.datawc_x2=1.e20
             self.datawc_y1=1.e20
             self.datawc_y2=1.e20
-            self.xmtics1='*'
-            self.xmtics2='*'
-            self.ymtics1='*'
-            self.ymtics2='*'
+            self.colormap="default"
+            self.xmtics1=''
+            self.xmtics2=''
+            self.ymtics1=''
+            self.ymtics2=''
             self.xticlabels1='*'
             self.xticlabels2='*'
             self.yticlabels1='*'
             self.yticlabels2='*'
             self.xaxisconvert= 'linear'
             self.yaxisconvert= 'linear'
+            self.color_1 = 16
+            self.color_2 = 239
             self.legend = None
             self.projection='linear'
         else:
-            gm =  vcsaddons.gms[self.g_name].get(source,None)
-            if gm is None:
-                raise "error could not find graphic method %s (of type %s)" % (source, self.g_name)
+            if isinstance(source, (str, unicode)):
+                gm = vcsaddons.gms[self.g_type].get(source,None)
+                if gm is None:
+                    raise "error could not find graphic method %s (of type %s)" % (source, self.g_type)
+            else:
+                gm = source
             self.datawc_x1=gm.datawc_x1
             self.datawc_x2=gm.datawc_x2
             self.datawc_y1=gm.datawc_y1
-            self.datawc_y2=gm.datawc_x2
+            self.datawc_y2=gm.datawc_y2
+            self.colormap=gm.colormap
             self.xmtics1=gm.xmtics1
             self.xmtics2=gm.xmtics2
             self.ymtics1=gm.ymtics1
@@ -68,11 +75,13 @@ class VCSaddon:
             self.yticlabels2=gm.yticlabels2
             self.xaxisconvert=gm.xaxisconvert
             self.yaxisconvert= gm.yaxisconvert
+            self.color_1 = gm.color_1
+            self.color_2 = gm.color_2
             self.legend = gm.legend
             self.projection=gm.projection
         self.name = name
-        vcsaddons.gms[self.g_name][name]=self
-        
+        vcsaddons.gms[self.g_type][name]=self
+
 
     def list(self):
         print 'graphics method = ',self.g_name
@@ -97,9 +106,9 @@ class VCSaddon:
         raise "Plot function not implemented for graphic method type: %s" % self.g_name
 
     def prep_plot(self,xmn,xmx,ymn,ymx):
-        
+
         self.save()
-        
+
         if self.datawc_x1!=1.e20:
             xmn = self.datawc_x1
         if self.datawc_x2!=1.e20:
@@ -122,6 +131,30 @@ class VCSaddon:
                         setattr(self,axes+sec+n,vcs.mklabels(sc))
         return xmn,xmx,ymn,ymx
 
+    def plot_internal(self, slab=None, slab2=None, template=None, bg=0, x=None, **kwargs):
+        """
+        Used by vcs to properly build a display plot for this graphics method.
+        """
+        if x is None:
+            x = self.x
+
+        if slab2 is not None:
+            displays = self.plot(slab, slab2, template, bg, x, **kwargs)
+        else:
+            displays = self.plot(slab, template, bg, x, **kwargs)
+
+        for display in displays:
+            # Remove the display from the canvas
+            if display.name in x.display_names:
+                x.display_names.remove(display.name)
+        nm, src = x.check_name_source(None, "default", "display")
+        display = vcs.displayplot.Dp(nm)
+        display.g_name = self.name
+        display.g_type = self.g_type
+        display.array = [slab, slab2]
+        return display
+
+
     def save(self,attribute = None):
         if attribute is not None:
             self._saves[attribute] = getattr(self,attribute)
@@ -139,15 +172,15 @@ class VCSaddon:
             self._saves={}
 
 
-    def getgm(self,name):
+    def getgm(self,source="default"):
         gm = None
-        for nm in vcsaddons.gms[self.g_name].keys():
-            if name == nm:
-                return vcsaddons.gms[self.g_name][nm]
+        for nm in vcsaddons.gms[self.g_type].keys():
+            if source == nm:
+                return vcsaddons.gms[self.g_type][nm]
 
         if gm is None:
-            raise "Could not find graphic method %s named: %s" % (self.g_type, name)
+            raise "Could not find graphic method %s named: %s" % (self.g_type, source)
 
     def creategm(self,name,source='default'):
         return self.__init__(name,source=source,x=self.x,template=self.template)
-        
+
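+# A minimal sketch (hypothetical subclass) of the registration contract that
+# VCSaddon.__init__ relies on: set g_name and g_type before calling the base
+# constructor so the instance is filed under vcsaddons.gms[g_type][name].
+#
+#     from core import VCSaddon
+#
+#     class Gex(VCSaddon):
+#         def __init__(self, name=None, source='default', x=None, template=None):
+#             self.g_name = 'Gex'
+#             self.g_type = 'example'
+#             VCSaddon.__init__(self, name, source, x, template)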
diff --git a/Packages/vcsaddons/Lib/histograms.py b/Packages/vcsaddons/Lib/histograms.py
index 6e974f2a6312e9e149e51a68f21ce35c1ecef56a..0273529cdd84771fee69903dba93a4aa9738e485 100644
--- a/Packages/vcsaddons/Lib/histograms.py
+++ b/Packages/vcsaddons/Lib/histograms.py
@@ -1,116 +1,218 @@
 from core import VCSaddon
-import cdms2,MV2,vcs,vcsaddons
+import cdms2
+import MV2
+import numpy
+import vcs
+import vcsaddons
+
 
 class Ghg(VCSaddon):
-    def __init__(self,name=None,source='default',x=None,template = None):
-        self.g_name='Ghg'
-        self.g_type='histogram'
-        VCSaddon.__init__(self,name,source,x,template)
+
+    def __init__(self, name=None, source='default', x=None, template=None):
+        self.g_name = 'Ghg'
+        self.g_type = 'histogram'
+        VCSaddon.__init__(self, name, source, x, template)
         if source == 'default':
-            self.fillareastyles = ['solid',]
-            self.fillareaindices = [1,]
-            self.fillareacolors = [252,]
-            self.line = ['solid',]
-            self.linewidth=[1.0,]
-            self.linecolors=[241,]
+            self.line = []
+            self.linewidth = []
+            self.linecolors = []
+            self.fillareastyles = []
+            self.fillareaindices = []
+            self.fillareacolors = []
+            self.bins = []
         else:
-            gm = vcsaddons.gms[self.g_name][source]
-            self.fillareastyle= gm.fillareastyles
-            self.fillareaindices = gm.fillareaindices
-            self.fillareacolors = gm.fillareacolors
+            if isinstance(source, (str, unicode)):
+                gm = vcsaddons.gms[self.g_type][source]
+            else:
+                gm = source
             self.line = gm.line
             self.linewidth = gm.linewidth
             self.linecolors = gm.linecolors
-            
+            self.fillareastyles = gm.fillareastyles
+            self.fillareaindices = gm.fillareaindices
+            self.fillareacolors = gm.fillareacolors
+            self.bins = gm.bins
 
     def list(self):
-        print '---------- Histogram (Ghg) member (attribute) listings ----------'
-        print 'Canvas Mode = ',self.x.mode
-        VCSaddon.list(self)
-        print 'fillareastyles = ', self.fillareastyles
-        print 'fillareaindices = ', self.fillareaindices
-        print 'fillareacolors = ', self.fillareacolors
-        print 'line = ', self.line
-        print 'linewidth = ', self.linewidth
-        print 'linecolors = ', self.linecolors
-        
-    
-    def plot(self,data,template = None, bg=0, x=None):
+        print '---------- Histogram (Ghg) member (attribute) listings ----------'  # pragma: no cover
+        print 'Canvas Mode = ', self.x.mode  # pragma: no cover
+        VCSaddon.list(self)  # pragma: no cover
+        print 'fillareastyles = ', self.fillareastyles  # pragma: no cover
+        print 'fillareaindices = ', self.fillareaindices  # pragma: no cover
+        print 'fillareacolors = ', self.fillareacolors  # pragma: no cover
+        print 'line = ', self.line  # pragma: no cover
+        print 'linewidth = ', self.linewidth  # pragma: no cover
+        print 'linecolors = ', self.linecolors  # pragma: no cover
+        print 'bins = ', self.bins  # pragma: no cover
+
+    def plot(self, data, template=None, bg=0, x=None, **kwargs):
         if x is None:
             x = self.x
         if template is None:
             template = self.template
-        elif isinstance(template,str):
+        elif isinstance(template, str):
             template = x.gettemplate(template)
-        elif not vcs.istemplate(template):
-            raise "Error did not know what to do with template: %s" % template
-        
-        if not isinstance(data,cdms2.tvariable.TransientVariable):
-            mode= cdms2.getAutoBounds()
-            cdms2.setAutoBounds("on")
-            data = MV2.array(data)
-            data.getAxis(-1).getBounds()
-            cdms2.setAutoBounds(mode)
-
-        while data.rank()>1:
-            data = data[0]
+        elif not vcs.istemplate(template):  # pragma: no cover
+            raise ValueError("Error did not know what to do with template: %s" % template)  # pragma: no cover
+        try:
+            data_name = data.title
+        except AttributeError:
+            try:
+                data_name = data.long_name
+            except AttributeError:
+                try:
+                    data_name = data.id + data.units
+                except AttributeError:
+                    try:
+                        data_name = data.id
+                    except AttributeError:
+                        data_name = "array"
+
+        # We just flatten the data; callers who need finer control should
+        # pass in data already shaped the way they want it binned.
+        if isinstance(data, cdms2.avariable.AbstractVariable):
+            data = data.asma()
+        data = data.flatten()
 
         # ok now we have a good x and a good data
-        nbars = len(data)
+        if not self.bins:
+            self.bins = vcs.utils.mkscale(*vcs.minmax(data))
+
+        # Sort the bins
+        self.bins.sort()
+
+        # Prune duplicates
+        pruned_bins = []
+        for bin in self.bins:
+            if pruned_bins and numpy.allclose(bin, pruned_bins[-1]):
+                continue
+            pruned_bins.append(bin)
+        self.bins = pruned_bins
+        data_bins = numpy.digitize(data, self.bins) - 1
+        binned = [data[data_bins == i] for i in range(len(self.bins))]
+        means = []
+        stds = []
+
+        max_possible_deviance = 0
+
+        for ind, databin in enumerate(binned):
+            if len(databin) > 0:
+                means.append(databin.mean())
+                stds.append(databin.std())
+            else:
+                means.append(0)
+                stds.append(0)
+            if len(self.bins) > ind + 1:
+                max_possible_deviance = max(means[ind] - self.bins[ind], self.bins[ind + 1] - means[ind], max_possible_deviance)
+            else:
+                max_possible_deviance = max(means[ind] - self.bins[ind], max_possible_deviance)
+        color_values = [std / max_possible_deviance for std in stds]
+        y_values = [len(databin) for databin in binned]
+        nbars = len(self.bins) - 1
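+        # Worked sketch of the binning above: with bins [0, 1, 2],
+        # numpy.digitize([0.5, 1.5], [0, 1, 2]) - 1 gives [0, 1], i.e. each
+        # value lands in the bin whose left edge precedes it. Each bin's
+        # standard deviation, normalized by max_possible_deviance, becomes
+        # its color_value in [0, 1].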
 
         # create the primitive
         fill = x.createfillarea()
         line = x.createline()
-        fill.viewport = [template.data.x1,template.data.x2,template.data.y1,template.data.y2]
-        line.viewport = [template.data.x1,template.data.x2,template.data.y1,template.data.y2]
-        axb = data.getAxis(0).getBounds()
-        xmn,xmx = vcs.minmax(axb)
-        ymn,ymx = vcs.minmax(data)
-        
-        xmn,xmx,ymn,ymx = self.prep_plot(xmn,xmx,ymn,ymx)
-        
-        fill.worldcoordinate=[xmn,xmx,ymn,ymx]
-        line.worldcoordinate=[xmn,xmx,ymn,ymx]
-        
-        styles =[]
+        fill.viewport = [
+            template.data.x1, template.data.x2, template.data.y1, template.data.y2]
+        line.viewport = [
+            template.data.x1, template.data.x2, template.data.y1, template.data.y2]
+
+        vcs_min_max = vcs.minmax(self.bins)
+        if numpy.allclose(self.datawc_x1, 1e20):
+            xmn = vcs_min_max[0]
+        else:
+            xmn = self.datawc_x1
+
+        if numpy.allclose(self.datawc_x2, 1e20):
+            xmx = vcs_min_max[1]
+        else:
+            xmx = self.datawc_x2
+
+        if numpy.allclose(self.datawc_y2, 1e20):
+            # Make the y scale be slightly larger than the largest bar
+            ymx = max(y_values) * 1.25
+        else:
+            ymx = self.datawc_y2
+
+        if numpy.allclose(self.datawc_y1, 1e20):
+            ymn = 0
+        else:
+            ymn = self.datawc_y1
+
+        fill.worldcoordinate = [xmn, xmx, ymn, ymx]
+        line.worldcoordinate = [xmn, xmx, ymn, ymx]
+
+        styles = []
         cols = []
         indices = []
         lt = []
-        lw =[]
+        lw = []
         lc = []
         xs = []
         ys = []
-        
+
+        levels = [.1 * i for i in range(11)]
+
+        # Extend fillarea and line attrs to levels
+        if self.fillareastyles:
+            while len(self.fillareastyles) < (len(levels) - 1):
+                self.fillareastyles.append(self.fillareastyles[-1])
+        else:
+            self.fillareastyles = ["solid"] * (len(levels) - 1)
+
+        if self.fillareacolors:
+            while len(self.fillareacolors) < (len(levels) - 1):
+                self.fillareacolors.append(self.fillareacolors[-1])
+        else:
+            for lev in levels[:-1]:
+                self.fillareacolors.append(int((self.color_2 - self.color_1) * lev) + self.color_1)
+
+        if self.fillareaindices:
+            while len(self.fillareaindices) < (len(levels) - 1):
+                self.fillareaindices.append(self.fillareaindices[-1])
+        else:
+            self.fillareaindices = [1] * (len(levels) - 1)
+
+        if self.line:
+            while len(self.line) < (len(levels) - 1):
+                self.line.append(self.line[-1])
+        else:
+            self.line = ["solid"] * (len(levels) - 1)
+
+        if self.linewidth:
+            while len(self.linewidth) < (len(levels) - 1):
+                self.linewidth.append(self.linewidth[-1])
+        else:
+            self.linewidth = [1] * (len(levels) - 1)
+
+        if self.linecolors:
+            while len(self.linecolors) < (len(levels) - 1):
+                self.linecolors.append(self.linecolors[-1])
+        else:
+            self.linecolors = ["black"] * (len(levels) - 1)
 
         for i in range(nbars):
-            if i < len(self.fillareastyles):
-                styles.append(self.fillareastyles[i])
-            else:
-                styles.append(self.fillareastyles[-1])
-            if i < len(self.fillareacolors):
-                cols.append(self.fillareacolors[i])
-            else:
-                cols.append(self.fillareacolors[-1])
-            if i < len(self.fillareaindices):
-                indices.append(self.fillareaindices[i])
-            else:
-                indices.append(self.fillareaindices[-1])
-            if i < len(self.line):
-                lt.append( self.line[i])
-            else:
-                lt.append(self.line[-1])
-            if i < len(self.linewidth):
-                lw.append( self.linewidth[i])
+            # Calculate level for bar
+            value = color_values[i]
+            for lev_ind in range(len(levels)):
+                if levels[lev_ind] > value:
+                    if lev_ind > 0:
+                        lev_ind -= 1
+                        break
+                    else:
+                        # Shouldn't ever get here since level 0 is 0
+                        assert False  # pragma: no cover
             else:
-                lw.append(self.linewidth[-1])
-            if i < len(self.line):
-                lc.append( self.linecolors[i])
-            else:
-                lc.append(self.linecolors[-1])
-            
-            xs.append( [axb[i][0],axb[i][1],axb[i][1],axb[i][0],axb[i][0]])
-            ys.append( [0,0,data[i],data[i],0])
+                assert False  # pragma: no cover
+            styles.append(self.fillareastyles[lev_ind])
+            cols.append(self.fillareacolors[lev_ind])
+            indices.append(self.fillareaindices[lev_ind])
+            lt.append(self.line[lev_ind])
+            lw.append(self.linewidth[lev_ind])
+            lc.append(self.linecolors[lev_ind])
 
+            xs.append([self.bins[i], self.bins[i], self.bins[i + 1], self.bins[i + 1]])
+            ys.append([0, y_values[i], y_values[i], 0])
 
         fill.style = styles
         fill.x = xs
@@ -118,20 +220,43 @@ class Ghg(VCSaddon):
         fill.style
         fill.index = indices
         fill.color = cols
+        fill.colormap = self.colormap
         line.x = xs
         line.y = ys
         line.type = lt
         line.width = lw
         line.color = lc
-
         displays = []
-        displays.append(x.plot(fill,bg=bg))
-        displays.append(x.plot(line,bg=bg))
 
-        x.worldcoordinate = fill.worldcoordinate 
-        dsp = template.plot(data,self,bg=bg)
+        x_axis = cdms2.createAxis(self.bins, id=data_name)
+        y_axis = cdms2.createAxis(vcs.mkscale(ymn, ymx), id="bin_size")
+
+        displays.append(x.plot(fill, bg=bg, render=False))
+        arr = MV2.masked_array(y_values)
+        arr.setAxis(0, x_axis)
+        dsp = template.plot(x, arr, self, bg=bg, X=x_axis, Y=y_axis)
         for d in dsp:
-            displays.append(d)
+            if d is not None:
+                displays.append(d)
+        legend_labels = {0: "No Variance",
+                         .1: "",
+                         .2: "",
+                         .3: "",
+                         .4: "",
+                         .5: "",
+                         .6: "",
+                         .7: "",
+                         .8: "",
+                         .9: "",
+                         1: "High Variance"}
+        template.drawColorBar(self.fillareacolors, levels,
+                              legend=legend_labels, x=x,
+                              style=self.fillareastyles,
+                              index=self.fillareaindices)
+
+        displays.append(x.plot(line, bg=bg))
+
+        x.worldcoordinate = fill.worldcoordinate
 
         self.restore()
         return displays
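+
+# Example sketch of the reworked histogram addon (the data here is arbitrary):
+#
+#     import numpy
+#     import vcsaddons
+#
+#     histo = vcsaddons.createhistogram()
+#     histo.bins = [0., .25, .5, .75, 1.]  # optional; derived via mkscale if empty
+#     displays = histo.plot(numpy.random.random(1000), bg=1)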
diff --git a/Packages/vcsaddons/Lib/polar.py b/Packages/vcsaddons/Lib/polar.py
new file mode 100644
index 0000000000000000000000000000000000000000..900b3491682b2aa4019a46d65cd0f146c1365efd
--- /dev/null
+++ b/Packages/vcsaddons/Lib/polar.py
@@ -0,0 +1,555 @@
+import vcs
+import numpy
+import vcsaddons
+
+
+def circle_points(center, radius, points=75, ratio=1):
+    """
+    Generates the coordinates of a circle as separate x and y lists.
+    """
+    x = []
+    y = []
+    if ratio > 1:
+        ymul = ratio
+        xmul = 1
+    else:
+        xmul = ratio
+        ymul = 1
+    for i in range(points):
+        x.append(center[0] + xmul * radius * numpy.cos(float(i) / points * numpy.pi * 2))
+        y.append(center[1] + ymul * radius * numpy.sin(float(i) / points * numpy.pi * 2))
+    x.append(x[0])
+    y.append(y[0])
+    return x, y
+
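+# Example: circle_points((0, 0), 1, points=4) returns the four points on the
+# unit circle at angles 0, pi/2, pi and 3pi/2, plus a closing copy of the
+# first point.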
+
+def convert_arrays(var, theta):
+    """
+    Normalizes valid input options to two lists of lists of values and a list of names.
+
+    Handles:
+    list/tuple of list/tuples/arrays
+    (X,N,2) array
+    (N,2) array
+    list/tuple, list/tuple
+    """
+    magnitudes = []
+    thetas = []
+    names = []
+    if theta is None:
+        # var must be list/tuple of arrays or an array
+        if isinstance(var, (list, tuple)):
+            for arr in var:
+                if isinstance(arr, numpy.ndarray):
+                    if len(arr.shape) == 2 and arr.shape[1] == 2:
+                        magnitudes.append(arr[..., 0].tolist())
+                        thetas.append(arr[..., 1].tolist())
+                        try:
+                            names.append(arr.id)
+                        except AttributeError:
+                            names.append(None)
+                    else:
+                        raise ValueError("Array is wrong shape; expected 2d array of 2-long elements,"
+                                         " got %dd array of %d-long elements." % (len(arr.shape), arr.shape[-1]))
+                else:
+                    if len(arr) == 2:
+                        # Might be just a pair
+                        if not isinstance(arr[0], (list, tuple)):
+                            magnitudes.append([arr[0]])
+                            thetas.append([arr[1]])
+                            names.append(None)
+                            continue
+                    mag_group = []
+                    theta_group = []
+                    for val in arr:
+                        if len(val) != 2:
+                            raise ValueError("List is wrong shape; expected list/tuple of 2 element list/tuples,"
+                                             " got %s of %d elements." % (type(val).__name__, len(val)))
+                        mag_group.append(val[0])
+                        theta_group.append(val[1])
+                    names.append(None)
+                    magnitudes.append(mag_group)
+                    thetas.append(theta_group)
+        else:
+            if len(var.shape) == 3:
+                for i in range(var.shape[0]):
+                    magnitudes.append(var[i, ..., 0].tolist())
+                    thetas.append(var[i, ..., 1].tolist())
+                    try:
+                        names.append(var[i].id)
+                    except AttributeError:
+                        names.append(None)
+            else:
+                magnitudes = [var[..., 0].tolist()]
+                thetas = [var[..., 1].tolist()]
+                try:
+                    names.append(var.id)
+                except AttributeError:
+                    names.append(None)
+    else:
+        magnitudes = []
+        if isinstance(var, (list, tuple)):
+            if isinstance(var[0], (list, tuple, numpy.ndarray)):
+                for v in var:
+                    magnitudes.append(list(v))
+                    try:
+                        names.append(v.id)
+                    except AttributeError:
+                        names.append(None)
+            else:
+                magnitudes = [var]
+                names.append(None)
+        elif isinstance(var, numpy.ndarray):
+            if len(var.shape) == 1:
+                magnitudes = [list(var)]
+                try:
+                    names.append(var.id)
+                except AttributeError:
+                    names.append(None)
+            elif len(var.shape) == 2:
+                for i in range(var.shape[0]):
+                    magnitudes.append(list(var[i]))
+                    try:
+                        names.append(var[i].id)
+                    except AttributeError:
+                        names.append(None)
+            else:
+                raise ValueError("Array is wrong shape; expected 1d array or 2d array,"
+                                 " got %dd array." % len(var.shape))
+
+        thetas = []
+        if isinstance(theta, (list, tuple)):
+            if isinstance(theta[0], (list, tuple, numpy.ndarray)):
+                thetas = [list(v) for v in theta]
+            else:
+                thetas = [theta]
+        elif isinstance(theta, numpy.ndarray):
+            if len(theta.shape) == 1:
+                thetas = [list(theta)]
+            elif len(theta.shape) == 2:
+                thetas = [list(theta[i]) for i in range(theta.shape[0])]
+            else:
+                raise ValueError("Array is wrong shape; expected 1d array or 2d array,"
+                                 " got %dd array." % len(theta.shape))
+        if not names:
+            names = [None] * len(var)
+    return magnitudes, thetas, names
+
+
+class Gpo(vcsaddons.core.VCSaddon):
+    def __init__(self, name=None, source="default", x=None, template=None):
+        self.g_name = "Gpo"
+        self.g_type = "polar_oned"
+        super(Gpo, self).__init__(name, source, x, template)
+        self.x = None
+        if source == "default":
+            self.markersizes = [3]
+            self.markercolors = ["black"]
+            self.markers = ["dot"]
+            self.markercolorsource = "group"
+            self.clockwise = False
+            self.theta_offset = 0
+            self.magnitude_ticks = "*"
+            self.magnitude_mintics = None
+            self.magnitude_tick_angle = 0
+            self.theta_tick_count = 6
+            self.group_names = []
+            self.draw_lines = False
+            self.connect_groups = False
+            self.linecolors = ["black"]
+            self.lines = ["solid"]
+            self.linewidths = [1]
+            self.markerpriority = 2
+            self.linepriority = 1
+            # Nice default labels
+            self.xticlabels1 = {
+                0: "0 (2pi)",
+                numpy.pi / 4: "pi/4",
+                numpy.pi / 2: "pi/2",
+                numpy.pi * 3 / 4.: "3pi/4",
+                numpy.pi: "pi",
+                numpy.pi * 5 / 4.: "5pi/4",
+                numpy.pi * 3 / 2.: "3pi/2",
+                numpy.pi * 7 / 4.: "7pi/4",
+            }
+        else:
+            if isinstance(source, (str, unicode)):
+                gm = vcsaddons.gms[self.g_type][source]
+            else:
+                gm = source
+            self.markersizes = gm.markersizes
+            self.markercolors = gm.markercolors
+            self.markers = gm.markers
+            self.markercolorsource = gm.markercolorsource
+            self.markerpriority = gm.markerpriority
+            self.clockwise = gm.clockwise
+            self.draw_lines = gm.draw_lines
+            self.linecolors = gm.linecolors
+            self.linewidths = gm.linewidths
+            self.linepriority = gm.linepriority
+            self.lines = gm.lines
+            self.connect_groups = gm.connect_groups
+            self.theta_offset = gm.theta_offset
+            self.magnitude_ticks = gm.magnitude_ticks
+            self.magnitude_mintics = gm.magnitude_mintics
+            self.magnitude_tick_angle = gm.magnitude_tick_angle
+            self.theta_tick_count = gm.theta_tick_count
+            self.group_names = gm.group_names
+        self.to_cleanup = []
+
+    def create_text(self, tt, to):
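+        # Build a combined text object from the given table/orientation pair
+        # and track its parts so plot() can delete them after rendering.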
+        tc = vcs.createtext(Tt_source=tt, To_source=to)
+        self.to_cleanup.append(tc.Tt)
+        self.to_cleanup.append(tc.To)
+        return tc
+
+    def text_orientation_for_angle(self, theta, source="default"):
+        """
+        Generates a text orientation that aligns labels sensibly for the quadrant the angle falls in.
+        """
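+        # e.g. theta = pi/4 (upper-right quadrant) yields valign="bottom" and
+        # halign="left"; theta = pi yields valign="half" and halign="right".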
+        # Normalize to [0, 2*pi)
+        while theta < 0:
+            theta += 2 * numpy.pi
+        while theta >= 2 * numpy.pi:
+            theta -= 2 * numpy.pi
+
+        if 0 < theta < numpy.pi:
+            valign = "bottom"
+        elif theta == 0 or theta == numpy.pi:
+            valign = "half"
+        else:
+            valign = "top"
+
+        if theta < numpy.pi / 2 or theta > numpy.pi * 3 / 2:
+            halign = "left"
+        elif numpy.allclose(theta, numpy.pi / 2) or numpy.allclose(theta, numpy.pi * 3 / 2):
+            halign = "center"
+        else:
+            halign = "right"
+
+        # Build new text table
+        to = vcs.createtextorientation(source=source)
+        to.valign = valign
+        to.halign = halign
+        self.to_cleanup.append(to)
+        return to
+
+    def magnitude_from_value(self, value, minmax):
+        # Map a data value to a 0-1 fraction of the radius, using the explicit
+        # world coordinates when set and the provided (min, max) otherwise.
+        if numpy.allclose((self.datawc_y1, self.datawc_y2), 1e20):
+            vmin, vmax = minmax
+        else:
+            vmin, vmax = self.datawc_y1, self.datawc_y2
+
+        return (value - vmin) / float(vmax - vmin)
+
+    def theta_from_value(self, value):
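+        # Maps a data-coordinate value to radians, honoring theta_offset and
+        # clockwise. Worked example (illustrative): with datawc_x1=0,
+        # datawc_x2=360, theta_offset=90 and clockwise=False, a value of 90
+        # maps to 2 * pi * (90 / 360 + 90 / 360) = pi radians.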
+        if numpy.allclose((self.datawc_x1, self.datawc_x2), 1e20):
+            # No scale specified, just use the value as theta
+            return value + self.theta_offset
+
+        minval = self.datawc_x1
+        maxval = self.datawc_x2
+        offset = self.theta_offset / float(maxval - minval)
+
+        pct_val = (value - minval) / float(maxval - minval) + offset
+        rad_val = numpy.pi * 2 * pct_val
+        if self.clockwise:
+            # Reflect the value
+            rad_val *= -1
+        return rad_val
+
+    def plot(self, var, theta=None, template=None, bg=0, x=None):
+        """
+        Plots the given data on a polar grid.
+
+        If var is an ndarray whose last dimension has length 2, each pair is
+        interpreted as (magnitude, theta).
+
+        Otherwise, if theta is provided, var supplies the magnitudes and theta
+        the angles.
+        """
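+        # Usage sketch (hypothetical data; assumes this module is importable,
+        # e.g. as vcsaddons.polar):
+        #     gm = Gpo()
+        #     gm.plot(numpy.array([[1.0, 0.0], [2.0, numpy.pi / 2]]), bg=1)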
+        if x is None:
+            if self.x is None:
+                self.x = vcs.init()
+            x = self.x
+        if template is None:
+            template = self.template
+
+        if self.markercolorsource.lower() not in ("group", "magnitude", "theta"):
+            raise ValueError("polar.markercolorsource must be one of: 'group', 'magnitude', 'theta'")
+
+        magnitudes, thetas, names = convert_arrays(var, theta)
+        if self.group_names:
+            names = self.group_names
+            while len(names) < len(magnitudes):
+                names.append(None)
+
+        flat_magnitude = []
+        for i in magnitudes:
+            flat_magnitude.extend(i)
+        flat_theta = []
+        for i in thetas:
+            flat_theta.extend(i)
+
+        canvas = x
+        # Determine aspect ratio for plotting the circle
+        canvas_info = canvas.canvasinfo()
+        # Calculate aspect ratio of window
+        window_aspect = canvas_info["width"] / float(canvas_info["height"])
+        if window_aspect > 1:
+            ymul = window_aspect
+            xmul = 1
+        else:
+            ymul = 1
+            xmul = window_aspect
+        # Use window_aspect to adjust size of template.data
+        x0, x1 = template.data.x1, template.data.x2
+        y0, y1 = template.data.y1, template.data.y2
+
+        xdiff = abs(x1 - x0)
+        ydiff = abs(y1 - y0)
+
+        center = x0 + xdiff / 2., y0 + ydiff / 2.
+        diameter = min(xdiff, ydiff)
+        radius = diameter / 2.
+        plot_kwargs = {"render": False, "bg": bg, "donotstoredisplay": True}
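+        # Pattern used below: build a temporary vcs element, plot it with
+        # donotstoredisplay=True, then delete it from vcs.elements so temporary
+        # named elements do not accumulate across plots.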
+        # Outer line
+        if template.box1.priority > 0:
+            outer = vcs.createline(source=template.box1.line)
+            x, y = circle_points(center, radius, ratio=window_aspect)
+            outer.x = x
+            outer.y = y
+            canvas.plot(outer, **plot_kwargs)
+            del vcs.elements["line"][outer.name]
+
+        if numpy.allclose((self.datawc_y1, self.datawc_y2), 1e20):
+            if self.magnitude_ticks == "*":
+                m_scale = vcs.mkscale(*vcs.minmax(flat_magnitude))
+            else:
+                if isinstance(self.magnitude_ticks, (str, unicode)):
+                    ticks = vcs.elements["list"][self.magnitude_ticks]
+                else:
+                    ticks = self.magnitude_ticks
+                m_scale = ticks
+        else:
+            m_scale = vcs.mkscale(self.datawc_y1, self.datawc_y2)
+
+        if template.ytic1.priority > 0:
+            m_ticks = vcs.createline(source=template.ytic1.line)
+            m_ticks.x = []
+            m_ticks.y = []
+
+            if template.ylabel1.priority > 0:
+                to = self.text_orientation_for_angle(self.magnitude_tick_angle,
+                                                     source=template.ylabel1.textorientation)
+                m_labels = self.create_text(template.ylabel1.texttable, to)
+                m_labels.x = []
+                m_labels.y = []
+                m_labels.string = []
+                if self.yticlabels1 == "*":
+                    mag_labels = vcs.mklabels(m_scale)
+                else:
+                    mag_labels = self.yticlabels1
+            else:
+                m_labels = None
+
+            for lev in m_scale:
+                lev_radius = radius * self.magnitude_from_value(lev, (m_scale[0], m_scale[-1]))
+                x, y = circle_points(center, lev_radius, ratio=window_aspect)
+                if m_labels is not None:
+                    if lev in mag_labels:
+                        m_labels.string.append(mag_labels[lev])
+                        m_labels.x.append(xmul * lev_radius * numpy.cos(self.magnitude_tick_angle) + center[0])
+                        m_labels.y.append(ymul * lev_radius * numpy.sin(self.magnitude_tick_angle) + center[1])
+                m_ticks.x.append(x)
+                m_ticks.y.append(y)
+            canvas.plot(m_ticks, **plot_kwargs)
+            del vcs.elements["line"][m_ticks.name]
+            if m_labels is not None:
+                canvas.plot(m_labels, **plot_kwargs)
+                del vcs.elements["textcombined"][m_labels.name]
+
+        if template.ymintic1.priority > 0 and self.magnitude_mintics is not None:
+            mag_mintics = vcs.createline(source=template.ymintic1.line)
+            mag_mintics.x = []
+            mag_mintics.y = []
+
+            mintics = self.magnitude_mintics
+            if isinstance(mintics, (str, unicode)):
+                mintics = vcs.elements["list"][mintics]
+
+            for mag in mintics:
+                mintic_radius = radius * self.magnitude_from_value(mag, (m_scale[0], m_scale[-1]))
+                x, y = circle_points(center, mintic_radius, ratio=window_aspect)
+                mag_mintics.x.append(x)
+                mag_mintics.y.append(y)
+            canvas.plot(mag_mintics, **plot_kwargs)
+            del vcs.elements["line"][mag_mintics.name]
+
+        if self.xticlabels1 == "*":
+            if numpy.allclose((self.datawc_x1, self.datawc_x2), 1e20):
+                tick_thetas = list(numpy.arange(0, numpy.pi * 2, numpy.pi / 4))
+                tick_labels = {t: str(t) for t in tick_thetas}
+            else:
+                d_theta = (self.datawc_x2 - self.datawc_x1) / float(self.theta_tick_count)
+                tick_thetas = numpy.arange(self.datawc_x1, self.datawc_x2 + .0001, d_theta)
+                tick_labels = vcs.mklabels(tick_thetas)
+        else:
+            tick_thetas = self.xticlabels1.keys()
+            tick_labels = self.xticlabels1
+
+        if template.xtic1.priority > 0:
+            t_ticks = vcs.createline(source=template.xtic1.line)
+            t_ticks.x = []
+            t_ticks.y = []
+
+            if template.xlabel1.priority > 0:
+                t_labels = []
+                theta_labels = tick_labels
+            else:
+                t_labels = None
+
+            for t in tick_thetas:
+                angle = self.theta_from_value(t)
+                x0 = center[0] + (xmul * radius * numpy.cos(angle))
+                x1 = center[0]
+                y0 = center[1] + (ymul * radius * numpy.sin(angle))
+                y1 = center[1]
+                if t_labels is not None:
+                    label = self.create_text(template.xlabel1.texttable,
+                                             self.text_orientation_for_angle(angle,
+                                                                             source=template.xlabel1.textorientation))
+                    label.string = [theta_labels[t]]
+                    label.x = [x0]
+                    label.y = [y0]
+                    t_labels.append(label)
+                t_ticks.x.append([x0, x1])
+                t_ticks.y.append([y0, y1])
+            canvas.plot(t_ticks, **plot_kwargs)
+            del vcs.elements["line"][t_ticks.name]
+            if t_labels is not None:
+                for lab in t_labels:
+                    canvas.plot(lab, **plot_kwargs)
+                    del vcs.elements["textcombined"][lab.name]
+
+        values = vcs.createmarker()
+        values.type = self.markers
+        values.size = self.markersizes
+        values.color = self.markercolors
+        values.colormap = self.colormap
+        values.priority = self.markerpriority
+        values.x = []
+        values.y = []
+
+        if template.legend.priority > 0:
+            # Only labels that are set will show up in the legend
+            label_count = len(names) - len([i for i in names if i is None])
+            labels = self.create_text(template.legend.texttable, template.legend.textorientation)
+            labels.x = []
+            labels.y = []
+            labels.string = []
+
+        if self.draw_lines:
+            line = vcs.createline()
+            line.x = []
+            line.y = []
+            line.type = self.lines
+            line.color = self.linecolors if self.linecolors is not None else self.markercolors
+            line.width = self.linewidths
+            line.priority = self.linepriority
+
+            # Build the line coordinates before the marker loop; once the points
+            # are regrouped and flattened for coloring, their original order is lost.
+            for mag, theta in zip(magnitudes, thetas):
+                x = []
+                y = []
+
+                for m, t in zip(mag, theta):
+                    t = self.theta_from_value(t)
+                    r = self.magnitude_from_value(m, (m_scale[0], m_scale[-1])) * radius
+                    x.append(xmul * numpy.cos(t) * r + center[0])
+                    y.append(ymul * numpy.sin(t) * r + center[1])
+
+                if self.connect_groups:
+                    line.x.extend(x)
+                    line.y.extend(y)
+                else:
+                    line.x.append(x)
+                    line.y.append(y)
+
+        if self.markercolorsource.lower() in ("magnitude", "theta"):
+            # Regroup the values using the appropriate metric
+
+            mag_flat = numpy.array(magnitudes).flatten()
+            theta_flat = numpy.array(thetas).flatten()
+
+            if self.markercolorsource.lower() == "magnitude":
+                scale = m_scale
+                vals = mag_flat
+            else:
+                scale = tick_thetas
+                vals = theta_flat
+
+            indices = [numpy.where(numpy.logical_and(vals >= scale[i], vals <= scale[i + 1]))
+                       for i in range(len(scale) - 1)]
+            magnitudes = [mag_flat[inds] for inds in indices]
+            thetas = [theta_flat[inds] for inds in indices]
+            names = vcs.mklabels(scale, output="list")
+            names = [names[i] + " - " + names[i + 1] for i in range(len(names) - 1)]
+            label_count = len(names)
+
+        for mag, theta, name in zip(magnitudes, thetas, names):
+            x = []
+            y = []
+            for m, t in zip(mag, theta):
+                t = self.theta_from_value(t)
+                r = self.magnitude_from_value(m, (m_scale[0], m_scale[-1])) * radius
+                x.append(xmul * numpy.cos(t) * r + center[0])
+                y.append(ymul * numpy.sin(t) * r + center[1])
+
+            if template.legend.priority > 0 and name is not None:
+                y_offset = len(labels.x) / float(label_count) * (template.legend.y2 - template.legend.y1)
+                lx, ly = template.legend.x1, template.legend.y1 + y_offset
+                x.append(lx)
+                y.append(ly)
+                labels.x.append(lx + .01)
+                labels.y.append(ly)
+                labels.string.append(str(name))
+            values.x.append(x)
+            values.y.append(y)
+
+        if template.legend.priority > 0:
+            canvas.plot(labels, **plot_kwargs)
+            del vcs.elements["textcombined"][labels.name]
+        if self.draw_lines:
+            canvas.plot(line, **plot_kwargs)
+            del vcs.elements["line"][line.name]
+
+        for el in self.to_cleanup:
+            if vcs.istexttable(el):
+                if el.name in vcs.elements["texttable"]:
+                    del vcs.elements["texttable"][el.name]
+            else:
+                if el.name in vcs.elements["textorientation"]:
+                    del vcs.elements["textorientation"][el.name]
+        self.to_cleanup = []
+
+        # Prune unneeded levels from values
+        to_prune = []
+        for ind, (x, y) in enumerate(zip(values.x, values.y)):
+            if not (x and y):
+                to_prune.append(ind)
+
+        for prune_ind in to_prune[::-1]:
+            del values.x[prune_ind]
+            del values.y[prune_ind]
+            if len(values.color) > prune_ind and len(values.color) > 1:
+                del values.color[prune_ind]
+            if len(values.size) > prune_ind and len(values.size) > 1:
+                del values.size[prune_ind]
+            if len(values.type) > prune_ind and len(values.type) > 1:
+                del values.type[prune_ind]
+
+        canvas.plot(values, bg=bg, donotstoredisplay=True)
+        del vcs.elements["marker"][values.name]
+        return canvas
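+
+
+# End-to-end sketch (hypothetical names; assumes this module is importable as
+# vcsaddons.polar and that a canvas can be created in the background):
+#     import numpy
+#     import vcsaddons.polar
+#     gm = vcsaddons.polar.Gpo()
+#     canvas = gm.plot([1.0, 2.0, 3.0], [0.0, numpy.pi / 2, numpy.pi], bg=1)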
diff --git a/Packages/xmgrace/Lib/ValidationFunctions.py b/Packages/xmgrace/Lib/ValidationFunctions.py
index b9325ba0244a3ae4c56671d35de6b8086c3d5507..b2a68514cb290cc985d4569f85ad5f2ffc5e2ae4 100644
--- a/Packages/xmgrace/Lib/ValidationFunctions.py
+++ b/Packages/xmgrace/Lib/ValidationFunctions.py
@@ -74,8 +74,8 @@ def isNumber(value):
 def checkPositiveInt(self, name, value):
     if not isNumber(value):
         raise ValueError(name + ' must be an integer')
-    elif (not (isinstance(value, int) or isinstance(value, long))
-            and (not int(value) == value)):
+    elif (not (isinstance(value, int) or isinstance(value, long)) and
+            (not int(value) == value)):
         raise ValueError(name + ' must be an integer')
     elif value < 0:
         raise ValueError(name + ' must be positve')
@@ -172,8 +172,7 @@ def checkSide(self, name, value):
 def checkLoc(self, name, value):
     """ check the loc (auto) or a location """
     if not (
-        (isinstance(value, str) and value.lower() == 'auto')
-        or
+        (isinstance(value, str) and value.lower() == 'auto') or
         isListorTuple(value)
     ):
         raise ValueError(name + 'must be a "auto" or a tuple/list')
@@ -296,8 +295,7 @@ def checkFormat(self, name, value):
 def checkAuto(self, name, value):
     """ check for 'auto' or a value """
     if not (
-        (isinstance(value, str) and value.lower() == 'auto')
-        or
+        (isinstance(value, str) and value.lower() == 'auto') or
         isNumber(value)
     ):
         raise ValueError(name + 'must be a "auto" or a number')
diff --git a/README.md b/README.md
index f79ed08fbec182bbdbd28897107afe77d9c2dec1..5968858fef233c8927baf12e51ffdf946665ba1d 100644
--- a/README.md
+++ b/README.md
@@ -1,9 +1,12 @@
 uvcdat
 ======
 [![build status](https://travis-ci.org/UV-CDAT/uvcdat.svg?branch=master)](https://travis-ci.org/UV-CDAT/uvcdat/builds)
-[![stable version](http://img.shields.io/badge/stable version-2.4.0-brightgreen.svg)](https://github.com/UV-CDAT/uvcdat/releases/tag/2.4.0)
+[![stable version](http://img.shields.io/badge/stable version-2.4.1-brightgreen.svg)](https://github.com/UV-CDAT/uvcdat/releases/tag/2.4.1)
 ![platforms](http://img.shields.io/badge/platforms-linux | osx-lightgrey.svg)
-[![DOI](https://zenodo.org/badge/doi/10.5281/zenodo.45136.svg)](http://dx.doi.org/10.5281/zenodo.45136)
+[![DOI](https://zenodo.org/badge/doi/10.5281/zenodo.50101.svg)](http://dx.doi.org/10.5281/zenodo.50101)
+
+[![Anaconda-Server Badge](https://anaconda.org/uvcdat/uvcdat/badges/installer/conda.svg)](https://conda.anaconda.org/uvcdat)
+[![Anaconda-Server Badge](https://anaconda.org/uvcdat/uvcdat/badges/downloads.svg)](https://anaconda.org/uvcdat/uvcdat)
 
 Developed by partnering with ESGF and the community to create a larger problem-solving environment, UV-CDAT is an open source, easy-to-use application that links together disparate software subsystems and packages to form an integrated environment for analysis and visualization. This project seeks to advance climate science by fulfilling computational and diagnostic/visualization capabilities needed for DOE's climate research.
 
@@ -13,4 +16,4 @@ Developing and Contributing
 ------
 We'd love to get contributions from you! Please take a look at the
 [Contribution Documents](CONTRIBUTING.md) to see how to get your changes merged
-in.
\ No newline at end of file
+in.
diff --git a/TODO.txt b/TODO.txt
deleted file mode 100644
index fb03af1f8163c10a0036bd331fead24a1c2f8506..0000000000000000000000000000000000000000
--- a/TODO.txt
+++ /dev/null
@@ -1,5 +0,0 @@
-- Fix ESMF build
-- Verify individual packages
-- Verify if we can build using system
-- Consistent install and build directories
-- Install headers and lib under their own package name
diff --git a/docs/ChangeLog.txt b/docs/ChangeLog.txt
deleted file mode 100644
index 5f7d4950563869a23e0924b758899defaa8f0a30..0000000000000000000000000000000000000000
--- a/docs/ChangeLog.txt
+++ /dev/null
@@ -1,20 +0,0 @@
-2011-06-22 : recap (probably forgot many things):
- 		- grib2 support
-		- restAPI support
-		- GUI code moved to uv-cdat repo
-		- forecast support
-		- gridspec support
-2010-06-17 : got Qt to build automatically, (or use the binaries) 
-2010-06-17 : got --enable-vistrails to work and build SIP and PyQt
-2010-06-15 : got the right LDFLAGS going to compile Fortran extensions under mac
-2010-04-08 : build system fetches sources, no more gz in repository!
-2010-04-08 : preliminary merge with Qt branch. You should be able to build against Qt (which needs to be pre-installed independently for now)
-2010-02-11 : added netcdf 4.1
-2010-02-02 : added correct link calls for icc under linux
-2010-01-28 : fixed tiny-tiny leak in cdms2 and cdtime (setting dictionary item
-w/o decref tmp pyobject for int/string)
-2010-01-28 : latest netcdf4 daily from Ed, fixes a LOT of memory leaks.
-2010-01-13 : change default cdms2 compress value to no shuffle/deflate 1/defalteLevel 1
-2010-01-13 : newer NetCDF4, daily snapshot that has better chunking parameter, etc..
-2010-01-13 : latest Scientific Python  (SP)
-2010-01-13 : got vtk to build in framework mode under Mac
diff --git a/docs/Changes_3.3_to_4.0.sxc b/docs/Changes_3.3_to_4.0.sxc
deleted file mode 100644
index e811c67769e89d740d242f887021e5ac6dfc5dac..0000000000000000000000000000000000000000
Binary files a/docs/Changes_3.3_to_4.0.sxc and /dev/null differ
diff --git a/docs/Changes_3.3_to_4.csv b/docs/Changes_3.3_to_4.csv
deleted file mode 100644
index 762b7014b1043ea05f3cb49a0d6ed8fd68d272fe..0000000000000000000000000000000000000000
--- a/docs/Changes_3.3_to_4.csv
+++ /dev/null
@@ -1,133 +0,0 @@
-"Impact";"Package";"Section";"Change"
-1;"Cdms";"Axis";"Time dimension does not generate bounds automatically anymore – use cdms.setAutoBounds('on') -"
-1;"Cdms";"Curvilinear grids";"CurveGrids can be supplied in the axis list."
-1;"Cdms";"Curvilinear grids";"Added conversion routines: rect => curve => generic"
-1;"Cdms";"Curvilinear grids";"MV works with curvilinear grids"
-1;"Cdms";"Curvilinear grids";"Can read grids from scrip remap files"
-1;"Cdms";"Curvilinear grids";"getMesh returns a transposed mesh grid if requested."
-1;"Cdms";"Curvilinear grids";"On reading grids from files, check for cell convexity. Where that fails by default fix cells that straddle the 0 / 2pi boundary."
-1;"Cdms";"Curvilinear grids";"Added routines to write grids to external files in scrip format"
-1;"Cdms";"I/O";"Trapped seg faults on closed file objects."
-1;"Cdms";;"Fixed memory leaks."
-1;"Doc";"Tutorials";"New tutorials"
-1;"Exsrc";"DODS";"Now builds by default under Linux, updated to Version 3.4"
-1;"Vcdat";"DV";"A number reflecting selection/operation order appears next to selected variable"
-1;"Vcdat";"DV";"Templates and Graphic Method Listbox available (on/off) in Variables and Calculator area"
-1;"Vcdat";"Editors";"Template editor accessible from VCDAT (click and drag technology)"
-1;"Vcdat";"Editors";"Graphic Method Editors updated, preview/reset option added"
-1;"Vcdat";"General";"4 mode: 1-user defined layout; 2- Browse Variable; 3- Data Manipulation; 4- Graphics Display"
-1;"Vcdat";"GUI";"Layout changed, now has mode for plot edititng, letting you copy/paste and edit existing template and method"
-1;"Vcdat";"Plot";"Annotation pop-up available, with preview/reset/apply/cancel"
-1;"Vcdat";"Plot";"Page Layout Table to control what is displayed on the VCS Canvas, can turn on/off plots with one click"
-1;"Vcs";"Animations";" update to handle two or more on a page"
-1;"Vcs";"Animations";"Can read from a file"
-1;"Vcs";"Animations";"Can save to a file"
-1;"Vcs";"Curvilinear grids";"Handled automatically"
-1;"Vcs";"Projection";"Added gctp package to vcs, 28 new types of projection avalaible"
-1;"Vcs";"Templates";"Template editor by clicking"
-2;"Cdms";"I/O";"Added writeScripGrid, readScripGrid can read from a SCRIP mapping file"
-2;"Cdms";"Libcdms";"Removed spurious calendar error"
-2;"Cdms";"Time axis";"TransientAxis getCalendar returns DefaultCalendar if none specified. This is consistent with FileAxis.getCalendar()"
-2;"Doc ";"Tutorials";"Data can now be anywhere on user's space"
-2;"Genutil";"Arrayindexing";"New module, allows array indexing, e.g:C=genutil.arrayindexing.get(A,B)"
-2;"Genutil";"Filters";"Added filters module, so far only “running average”, 121 and custom"
-2;"Genutil";"Statistics";"fixed a bug in linear regression when trend is 0, probability are not set to missing"
-2;"Install";"All";"Updated external Packages to their latest versions, see README files"
-2;"Install";"CDAT";"Mac OS X “offically” supported"
-2;"Install";"Cdms";"Fix CDMS build for FreeBSD"
-2;"Install";"Cdms";"Added --cdms-only option to express_install."
-2;"Install";"Exsrc";"Now can build indiviudal package or exclude some"
-2;"Install";"Exsrc";"--list option added"
-2;"Install";"Exsrc";"--help option in install_script "
-2;"Vcdat";"0D (numbers)";"if no dimensions are present, then show the single vaule in the Defined Variables window"
-2;"Vcdat";"1D plot";"Overlay applies, use page layout for other types"
-2;"Vcdat";"DV";"Different mode of variable selection single/multiple"
-2;"Vcdat";"GUI";"Remove the 1-Plot, 2-Plot, 3-Plot, 4-Plot per page"
-2;"Vcdat";"I/O";"Simple binary file reader added"
-2;"Vcdat";"I/O";"Can read column arranged ascii files"
-2;"Vcdat";"I/O";"Save netcdf now has browser to go between directories"
-2;"Vcdat";"I/O";"Simple ascii file reader added"
-2;"Vcdat";"Menu";"New interface to user menu, more robust and intuitive"
-2;"Vcdat";"Scripting";"Added limited recordings of the functions (not available on beta)"
-2;"Vcs";"Boxfill";"Now can specify levels just like in isofill"
-2;"Vcs";"Isofill";"Now has legend otpion (i.e can specifiy where to  put values)"
-2;"Vcs";;"Linewidth option added where line are controlled"
-2;"Vcs";;"User can click on the plot and get worldcoordinate/index values, and actual value"
-3;"Cdms";"I/O";"picker selector, let you pick non contiguous values"
-3;"Cdutil";"Times";"Yearly and xDaily Time Bounds setting, plus accept either slab or axis"
-3;"Contrib";"F2py";"Added F2PY, fortran wrapper"
-3;"Install";"HDF";"--HDF= option let user point to HDF libraries"
-3;"Install";"Scrip";"Source code distributed but not installed"
-3;"Vcdat";"DV";"Replacing the eval call for the exec call. Now the user can enter any command"
-3;"Vcdat";"DV";"Fix for the automatic Numeric and MA conversion to MV. The user now has a choice to convert Numeric and MA to MV. That is, the user will need to select the ""Automatic Conversion to MV"" toggle in the ""Preferences"" menu to turn this feature on or off"
-3;"Vcs";"Plot";"Very High Resolution Continent Files generated via GMT are available on the web"
-3;"Vcs";"Templates";"Scaling, now can scale fonts"
-3;"Vcs";"Text";"Bg option works"
-4;"Cdms";"I/O";"Implemented isClose()."
-4;"Contrib";"Install";"Automatically built"
-4;"Exsrc";"VTK";"VTK (beta version) added to distrib, not built by default this is experimental"
-4;"Genutil";"Xmgrace";"Now can pass arguments at init time"
-4;"Vcdat";"General";"Retains user settings"
-4;"Vcdat";"General";"Predefined Region"
-4;"Vcdat";"General";"Can define colors using intensiy"
-4;"Vcdat";"Taylordiagram";"GUI updated"
-4;"Vcs";"Primitives";"Projection accessible"
-4;"Vcs";"Taylordiagram";"standard dev added to xaxis"
-4;"Vcs";"Taylordiagram";"taylor control the xtic/ytic/cticks"
-4;"Vcs";"Yxvsx, Xyvsy";"Auto now works again (no more blank canvas)"
-4;"Vcs";;"Improved thread support"
-5;"Cdms";"Cdscan";"Added –nofiles, --execfile option"
-5;"Cdms";"Cdscan";"Added --notrim-lat option"
-5;"Cdms";"Cdscan";"Added --filesxml option"
-5;"Cdms";"Curvilinear grids";"Raise error if nonrectangular grid has no bounds associated"
-5;"Cdms";"I/O";"Added gridftp protocol"
-5;"Cdutil";"Times";"fixed times.setTimesBoundsMonthly for end of month storage"
-5;"Cdutil";"Vertical";"Vertical Interpolation Tools (sigma2pressure)"
-5;"Contrib";"IaGraph";"Package of interactive graphing tools. IaGraph will create line plots, scatter plots, and contour plots"
-5;"Exsrc";"R";"Added R (statistical package) sources to distrib, not built by default"
-5;"Genutil";"Xmgrace";"safe/nosafe problem solved, version controled before passing the argument"
-5;"Ncml";;"New Package"
-5;"Scrip";;"New module, interface to scrip regridder"
-5;"Vcdat";"DV";"Icons tools added"
-5;"Vcdat";"General";"Keep (or not) Windows in front of main window"
-6;"Cdms";"Axis";"Align isLatitude, isLongitude test with CF convention"
-6;"Contrib";"ComparisonStatistics";"fixed a bug for TimeComponent 18, the seasonal weights used in computing the annual means were slightly off"
-6;"Contrib";"Rpy";"Added Rpy package, works if you have R on your system"
-6;"Exsrc";"Pbm";"Pbmplus replaced with netpbm on Linux and Mac systems"
-6;"Genutil";"Statistics";"Geometric Mean, Median and Rank functions added"
-6;"Vcdat";"1D plot";"Fixed multiplier annotation for 1D plots."
-6;"Vcdat";"DV";"Support for chemistry attributes"
-6;"Vcdat";"General";"Exit Popup to retain settings (can be turned off)"
-6;"Vcdat";"Menu";"Option pull down from main menu was changed to “Preferences”"
-6;"Vcs";"Animations";"bug fix for pan and zoom"
-6;"Vcs";"Templates";"Ratio options now supported, let user definean y/x ratio or if lat/lon let vcs find a good one"
-7;"Contrib";"ComparisonStatistics";"Handles fortran NaN"
-7;"Vcdat";"Annotations";"Changed annotation so a blank text field will print nothing in on the Canvas"
-7;"Vcs";"boxfill/isofill";"Extension bug fix, if levels set after ext_1"
-7;"Vcs";"Taylordiagram";"Taylordiags single precision ok now"
-8;"Cdms";"Cdscan";"Fixed bug when file has 'bounds' dimension."
-8;"Contrib";"ComparisonStatistics";"Updated doc"
-8;"Vcs";"Markers";"0 means no marker"
-8;"Vcs";"Taylordiagram";"fixed taylordiagrams.script and also listelements('taylordiagram') now works (x.show returns None)"
-9;"Cdms";"Drs";"Fixed the cycle process for DRS files. Must close DRS files before you open them again."
-9;"Cdutil";;"Removed Propertied Class dependencies, replaced with standard python (object/property)"
-9;"Contrib";"ComparisonStatistics";"Bug fixing"
-9;"Contrib";"Pyfort";"New version 8.4"
-9;"Vcdat";"General";"User can defined the name of its default template/graphic methods"
-9;"Vcs";"1D plots";"Bug fixes for graphics methods. That is, if the data only has 1 dimensions and 2 or more dimensions are required, then use Yxvsx to plot the data"
-9;"Vcs";"Printer";"lanscape/portrait argument bug fix"
-9;"Vcs";"Taylordiagram";"Skill drawing, bug fix"
-9;"Vcs";;"Major changes to the VCS graphics methods"
-9;"Vcs";;"Fixed attribute settings for missing attributes"
-10;"Cdms";"I/O";"Fixed bug in dataset.write when time axis is a float. This shows up with Numeric V23.1"
-10;"Cdms";;"Added xmllib to ditrib since it'll be abandoned in future python"
-10;"Cdutil";"VariableMatcher";"Added comment keyword"
-10;"Genutil";;"Removed Propertied Class dependencies, replaced with standard python (object/property)"
-10;"Vcs";"Colormap";"Added the NCAR Color Map to the initial.attribute file."
-10;"Vcs";"Colormap";"Cleanedup"
-10;"Vcs";"Templates";"fixed JYP comments about ""inconsistency"" on comment#1 of UL1of4 template"
-10;"Vcs";;"Removed Propertied Class dependencies, replaced with standard python (object/property)"
-11;"Cdms";"Curvilinear grids";"auxcoord, gengrid modules"
-11;"Esg";;"New Package"
-11;"Install";"CDAT";"Building GNU tar on older systems..."
-11;"Install";"CDAT";"--psql option added"
diff --git a/docs/Changes_3.3_to_4.pdf b/docs/Changes_3.3_to_4.pdf
deleted file mode 100644
index 408ca428a82faabccf24180853af8b797fb397aa..0000000000000000000000000000000000000000
Binary files a/docs/Changes_3.3_to_4.pdf and /dev/null differ
diff --git a/docs/Changes_3.3_to_4.xls b/docs/Changes_3.3_to_4.xls
deleted file mode 100644
index aabf7d192034c44d1ff0f6151d443b30e2aba7ea..0000000000000000000000000000000000000000
Binary files a/docs/Changes_3.3_to_4.xls and /dev/null differ
diff --git a/docs/DeveloperHowTo.html b/docs/DeveloperHowTo.html
deleted file mode 100644
index 209b85738d20403e5ecc269f213095b15a331c21..0000000000000000000000000000000000000000
--- a/docs/DeveloperHowTo.html
+++ /dev/null
@@ -1,6 +0,0 @@
-<html>
-
-<head>
-<meta http-equiv="refresh" content="0; url=http://github.com/UV-CDAT/uvcdat/wiki/Development"/>
-</head>
-</html>
diff --git a/docs/HELP.html b/docs/HELP.html
deleted file mode 100644
index 7430ec110119a492443b4dc78206cf10268c1df4..0000000000000000000000000000000000000000
--- a/docs/HELP.html
+++ /dev/null
@@ -1,6 +0,0 @@
-<html>
-
-<head>
-<meta http-equiv="refresh" content="0; url=http://github.com/UV-CDAT/uvcdat/wiki/Building-UVCDAT"/>
-</head>
-</html>
diff --git a/docs/README.txt b/docs/README.txt
deleted file mode 100644
index 99bf6afefee79913d78bd7f13670232197f9eda0..0000000000000000000000000000000000000000
--- a/docs/README.txt
+++ /dev/null
@@ -1,2 +0,0 @@
-The documentation of UV-CDAT has moved to github. Please visit
-this page for the latest documentation:  https://github.com/UV-CDAT/uvcdat/wiki
diff --git a/docs/ReleaseNotes.txt b/docs/ReleaseNotes.txt
deleted file mode 100644
index ca81f7443013c7b2e5fb92cd0658c54cb298bca9..0000000000000000000000000000000000000000
--- a/docs/ReleaseNotes.txt
+++ /dev/null
@@ -1,226 +0,0 @@
-********************* Changes in the next release  ****
-********************* Release 4.3 *************************************
-New build system, CDAT is now installed under CDAT/VERSION/bin 
-New Fonts system: most ttf fonts work now!
-New direct postscript output
-Hatch/pattern output working (see known bug though)
-Improved VCDAT
-New Package: Thermo
-EZTemplate
-genutil.filters
-going away from pyfort, using f2py instead
-pydebug included
-mpeg output
-generalized grid support
-improved web documentation
-
-!!!!!!!!!!!!!!! WARNING !!!!!!!!!!!!!!!!!
-There is a known bug in cairo (the postscript rendering library) that will
-cause the output to be wrong if you are using both
-pattern/hatches and dashed lines
-
-
-********************* Release 4.0 *************************************
-
-----------------------------
-MAJOR CHANGES TO THE RELEASE
-----------------------------
-
-*- Support for Generalized Grids, regridding possible using the SCRIP regridder
-(source code provided but not installed)
-
-*- Template Editor, let you resize, drag, drop object on your VCS Canvas
-
-*- Major Overhaull of the VCDAT GUI, with template and graphic methods
-editors. Various mode of use (variable browsing, computation mode, graphics
-mode). 
-
-*- Page Layout Editor available in VCDAT
-
-*- More projection available (GCTP package), total of 28 new types of
-projection, each with full parameters control
-
-*- Improved install process
-
-*- New packages: filter, vertical interpolation, IaGraph, f2py, R/Rpy, VTK
-(Linux only, and not supported), thermo (thermodynamic diagrams)
-
-*- Full list of Changes available in files Changes_3.3_to_4.* (Open Office, csv and
-pdf formats)
-
--------------------------
-KNOWN BUGS
--------------------------
-
-*- There is apparently a bug on the Mac X server, this forces the user to move
-the VCS canvas window in order for the threads to start. If not the 
-application might hang.
-
-*- Beta version does not have support for inline scripting from VCDAT, final
-version and some future beta wiull have full support.
-
-
-********************* Release 3.3 *************************************
-01. vcs plotting now as at-plotting-time overwritting capabilities, i.e, you can change a graphic method or template attribute as you plot by passing it to the plot command
-02. vcs text function let you know draw string on the fly, also accepts colors argument passed a colornames
-03 vcs canvas object have a new function "match_color" which allows the user to get the color in the colormap (in use or passed) that is closest from its desirted color
-04. rank function now available in genutil.statstics
-
-********************* Release 3.3 (10/01/2002)  ************************
-01. Meshfill support is now complete (for graphical output)
-       all is done at the C level, projection are authorized as well as
-       axes transformations. MUCH faster than the old python based support
-02. CompasrisonsStatistics contrib package added (PCMDI's compall)
-03. VariablesMatcher, VariableConditioner objects added, allows easy regrid/masking 
-       of datasets for comparison, module: cdutil
-04. Build changes have removed the need for users to set LD_LIBRARY_PATH.
-05. Now available on AIX.
-06. genutil.statistics now has percentiles and median functions
-07. genutil now has grower function (add dimensions to MV so they have matching shapes)
-08. genutil.statistics: fixed a bug when input datasets had different masks 
-       (now uses the union of both masks)
-09. pyclimate package added to the contrib, which is a Python package designed
-       to accomplish some usual tasks during the analysis of climate variability
-       using Python. It provides functions to perform some simple IO operations,
-       operations with COARDS-compliant netCDF files, EOF analysis, SVD and CCA
-       analysis of coupled data sets, some linear digital filters, kernel based
-       probabilitydensity function estimation and access to DCDFLIB.C library from
-       Python.
-10. Fixed the Read Script File file browser dialog "File of type". The
-       text said "Search for netCDF" and "Search for GrADS". This has
-       been replaced with "Search fo VCS file" and "Search for Python files".
-11. Sync'ed the continents to work with the Page Editor. In the process,
-       I fixed the canvas update to work with threads
-12. Ran Insure++ on code to clean up memory leaks in XGKS and VCS.
-13. Fixed "open" bug. That is, fixed bug for when x.open() called. It no
-       longer exists out of python from the command line.
-14. Can now print a VCS Canvas plot, produced in background mode,
-       to an output file (i.e., postscript, cgm, gif, or eps.).
-15. Reordered the include files to find the VCS include files first.
-       This solves the problem of VCS not compiling on Solaris 2.8
-       platforms. Solaris 2.8 platforms have a project.h file located in
-       the /usr/lib directory. project.h is also an include file for VCS.
-       In any case, forcing the VCS include files to be seen first, solves
-       this problem and should have no effect on other platforms.
-16. Bug fixes for the DEC platform. The low-level primative attribute settings
-       now work for the DEC platform.
-17. Fixed the legend label to be positioned at the bottom right corner of
-       the plot. It will also move with the legend when the legend is
-       altered by the user.
-18. Now the user does not have to specify the DISPLAY variable in order to run
-       VCS or VCDAT. That is the user can now run the VCS module from the
-       command-line, script, background mode, or from VCDAT without specifying
-       the environment variable DISPLAY. That is, XOpenDisplay is now set
-       to ":0.0" or "localhost:0.0" for VCS and for Tkinter.
-19. This function displays graphics segments, which are currently stored in
-       the frame buffer, on the VCS Canvas. That is, if the plot function
-       was called with the option bg = 1 (i.e., background mode), then the
-       plot is produced in the frame buffer and not visible to the user.
-       In order to view  the graphics segments, this function will copy
-       the contents of the frame buffer to the VCS Canvas, where the
-       graphics can be viewed by the user.
-20. Added outfill and outline to the browser interface. They are now part of the
-       graphics method pulldown selection. Also add a graphics method attribute
-       settings interface for the two graphics method so the user can
-       change their attributes.
-21. Added the capability for low-level primatives to accept Numeric arrays for
-       their list of x and y axis point values.
-22. Bug fix for generating more than 140 plots. That is, commented out
-        Py_DECREF(dict);
----
-       This DECREF causes memory problems for Python if the
-       user chooses to generate more than 140 plots in a Python loop.
-       That is, needed memory is removed then overwritten, which causes
-       Python to give the undefined global name error for "range", "len",
-       or some other globally defined Python key word.
-23. vcdat_teaching_script_file.py now work with appropriate graphics methods.
-       That is, you can now run "python vcdat_teaching_script_file.py" to replay
-       what was done in the VCDAT session.
-
-       Note: vcdat_recording_script_file.py worked because it did not rely on the
-               ASD.scr file.
-24. Show the index values for 1D axis variables
-25. Updated the "Alter Plot" GUI to reflect the minor ticks settings and the
-       axis type (i.e., Linear or Log10). Currently, the user can only view the
-       axis representation. Later when time permits, I will change the "View"
-       text input window to an "Edit" text input window. This will let the user
-       specify (via a Python dictionary) the location of the axis label and the
-       label text string. For example, { -90: "south pole", 0: "equator",
-       90: "north pole"}.
-26. Modified the VCDAT module to track user directory, file, and variable requests.
-       It also logs the user's wall clock and cpu time. Examples: "vcdat -u",
-       "vcdat -h".
-27. Updated the VCS initial.attributes file to Mike Wehner's initial.attibutes file.
-28. The Boxfill legend labels can be set with a list or a dictionary. For
-       example: [0, 10, 20, 30, 40] or {40:'some text', 50:'some more text'}.
-29. boxfill.legend={.5:'hi'} will now work if the legend is vertical
-       or horizontal.
-30. Colormap bug fix. The default colormap can now be changed,
-       then saved as a new colormap.
-31. VCDAT option menu: -h, print out help menu; -n, turn splash screen off, -u, turn
-       user tracking mode on. See item 17.
-32. Corrected the legend when plotting log based boxfill plots.
-33. Updated the X and Y axes, so that the user can modify and
-       change the axes values.
-34. The graphics method's data world coordinates (i.e., datawc_x1, datawc_x2,
-       datawc_y1, and datawc_y2) will override the incoming variable's coordinates.
-35. VCDAT has a new icon to bookmark files, works just like the directories
-36. Beta DODS support on Linux, build using --dods=/path/to/DODS/directory
-37. gplot now builds automatically
-38. various utilies necessary to produce GIF output are also now provided as part
-       of the install process.
-39. VCS template object now have scale, reset, move and moveto associated function
-40. added cddump utility (ncdump like utility that works on any type of file that
-       cdms can open) 
-41. VCDAT has new icon functions: grower, mask, getmask, less, greater, equal, not, standard deviation
-
-********************* Release 3.2 (4/15/2002)  ************************
-1. Revised build system allows for installation into an existing Python.
-   Please see README.txt to learn the new installation system.
-   (Paul Dubois). DRS and QL support are not built by default, unless you
-   use the --PCMDI option.
-2. In genutil.statistics, the linearegression now can return std error
-   and probabilities for regression constant (in addition of the regression coef).
-   Power users that used this capability should be aware that the syntax slightly
-   changed, see the doc string. (Charles Doutriaux)
-3. Contributed package shgrid, cssgrid, dsgrid  added (Clyde Dease)
-   See "INTRODUCTION TO NGMATH" below for descriptions.
-   After installation, go to the Test subdirectory of each and run python shgridtest.py;
-   a documentation file will be produced as a byproduct.
-4. Updated averager so it now takes integer for single axis operation.
-   Updated averager so the weights options are now "unweighted" and "weighted" instead of 
-   "equal" and "generate"; old options still work.
-   Updated averager so the weights keyword now works.
-   Updated the doc strings of averager so they reflect the previous 2 changes.
-   Updated genutil.statistics so weights options are now compatible with cdutil.averager.
-
-5. Solution to the high CPU usage problem when displaying a VCS Canvas. 
-   Modification to the VCS Canvas event mainloop was done to avoid high CPU 
-   usage while the VCS Canvas sat idle. This modification has no other 
-   effects on the graphics or the VCS Canvas behavior.
-
-6. Extensive improvements to the VCDAT browser. These are detailed at:
-   http://esg.llnl.gov/cdat/GUI_Modifications.html
-
-   INTRODUCTION TO NGMATH
-
-        The ngmath library is a collection of interpolators and approximators
-for one-dimensional, two-dimensional and three-dimensional data. The packages, 
-which were obtained from NCAR, are:
-
-  natgrid -- a two-dimensional random data interpolation package based on 
-              Dave Watson's nngridr. NOT built by default in CDAT due to 
-             compile problems on some platforms. Works on linux.
-
-  dsgrid --  a three-dimensional random data interpolator based on a
-             simple inverse distance weighting algorithm.
-
-  fitgrid -- an interpolation package for one-dimensional and two-dimensional 
-             gridded data based on Alan Cline's Fitpack. Fitpack uses splines 
-             under tension to interpolate in one and two dimensions.  
-             NOT IN CDAT.
-
-  csagrid -- an approximation package for one-dimensional, two-dimensional and 
-             three-dimensional random data based on David Fulker's Splpack. 
-             csagrid uses cubic splines to calculate its approximation function.
-
diff --git a/exsrc/Makefile.am.pixman b/exsrc/Makefile.am.pixman
deleted file mode 100644
index e57c21c46803f89b290d364520bef508aacb2426..0000000000000000000000000000000000000000
--- a/exsrc/Makefile.am.pixman
+++ /dev/null
@@ -1,54 +0,0 @@
-lib_LTLIBRARIES = libpixman-1.la
-libpixman_1_la_LDFLAGS = -version-info $(LT_VERSION_INFO) -no-undefined -export-symbols pixman.def
-libpixman_1_la_LIBADD = @DEP_LIBS@ -lm
-libpixman_1_la_SOURCES =		\
-	pixman.h			\
-	pixman-access.c			\
-	pixman-access-accessors.c	\
-	pixman-region.c			\
-	pixman-private.h		\
-	pixman-image.c			\
-	pixman-combine.c		\
-	pixman-compose.c		\
-	pixman-compose-accessors.c	\
-	pixman-pict.c			\
-	pixman-source.c			\
-	pixman-transformed.c		\
-	pixman-transformed-accessors.c	\
-	pixman-utils.c			\
-	pixman-edge.c			\
-	pixman-edge-accessors.c		\
-	pixman-edge-imp.h		\
-	pixman-trap.c			\
-	pixman-compute-region.c		\
-	pixman-timer.c
-
-libpixmanincludedir = $(includedir)/pixman-1/
-libpixmaninclude_HEADERS = pixman.h pixman-version.h
-noinst_LTLIBRARIES = 
-
-EXTRA_DIST = Makefile.win32
-
-# mmx code
-if USE_MMX
-noinst_LTLIBRARIES += libpixman-mmx.la
-libpixman_mmx_la_SOURCES = \
-	pixman-mmx.c \
-	pixman-mmx.h
-libpixman_mmx_la_CFLAGS = $(DEP_CFLAGS) $(MMX_CFLAGS)
-libpixman_mmx_la_LIBADD = $(DEP_LIBS)
-libpixman_1_la_LIBADD += libpixman-mmx.la
-endif
-
-
-# sse2 code
-if USE_SSE2
-noinst_LTLIBRARIES += libpixman-sse.la
-libpixman_sse_la_SOURCES = \
-	pixman-sse.c \
-	pixman-sse.h
-libpixman_sse_la_CFLAGS = $(DEP_CFLAGS) $(SSE_CFLAGS)
-libpixman_sse_la_LIBADD = $(DEP_LIBS)
-libpixman_1_la_LIBADD += libpixman-sse.la
-endif
-
diff --git a/exsrc/Numeric.sh b/exsrc/Numeric.sh
deleted file mode 100755
index d82ca417b4fb6c66a9eeea362bbf6fc5635b900b..0000000000000000000000000000000000000000
--- a/exsrc/Numeric.sh
+++ /dev/null
@@ -1,16 +0,0 @@
-#!/bin/sh
-PACKAGE="Numeric"
-. ./prolog.sh 
-CDMSARCH=`uname -m`
-if (test "${CDMSARCH}" = "ia64") then
-   echo "Numeric won't build on 64bit system, use numpy instead"
-   exit
-fi
-if (test "${CDMSARCH}" = "x86_64") then
-   echo "Numeric won't build on 64bit system, use numpy instead"
-   exit
-fi
-
-# Numeric, MA, PropertiedClasses, etc.
-(cd Numeric-*; ${prefix}/${version}/bin/python setup.py build ${D} install)
-
diff --git a/exsrc/Pmw.sh b/exsrc/Pmw.sh
deleted file mode 100755
index 70629fa8eaacdafa7d7473b531f256b316f2402c..0000000000000000000000000000000000000000
--- a/exsrc/Pmw.sh
+++ /dev/null
@@ -1,6 +0,0 @@
-#!/bin/sh
-PACKAGE="Pmw"
-. ./prolog.sh 
-# Twisted.
-(cd Pmw-* ; cd src;  ${prefix}/${version}/bin/python setup.py build ${D} install)
-
diff --git a/exsrc/R.sh b/exsrc/R.sh
deleted file mode 100755
index 4e2a38f55632fff0a46ebc9d6e5c3aad683cf4ed..0000000000000000000000000000000000000000
--- a/exsrc/R.sh
+++ /dev/null
@@ -1,5 +0,0 @@
-#!/bin/sh
-PACKAGE="R"
-. ./prolog.sh
-(cd R*; ./configure --enable-R-shlib --prefix=${prefix}/Externals/R ; make ; make install ; make install ; cd ${prefix}/Externals/R/lib ; ln -s Externals/R/bin/libR.so )
-
diff --git a/exsrc/README.txt b/exsrc/README.txt
deleted file mode 100644
index 72e35f7dcf2d2f35834461131d4a182a1335d5c5..0000000000000000000000000000000000000000
--- a/exsrc/README.txt
+++ /dev/null
@@ -1,23 +0,0 @@
-This directory cannot be built until Python is built.
-
-This directory contains sources for some parts of the CDAT
-system that we didn't write or which change on very slow timescales. 
-
-./install_script /whereyouwanttoputit 
-
-The subdirectory src contains the tarred/zipped files that are used to make
-the product. A subdirectory build will be created that contains the output.
-Some of these products can be tested by changing to their directory under 
-build and typing "make test".
-
-This process will unpack the tar files from the src directory if there is no 
-build subdirectory. Otherwise it doesn't. If you put in a new source file
-into src you need to clean before building.
-
-Log files are created in the build subdirectory.
-
-Each of the pieces may be built individually using the corresponding .sh 
-files in this directory. Some warning errors are usual from 
-many of the packages and vary from architecture to architecture.
-
-
diff --git a/exsrc/VTK_BUILD_ANSWERS.core b/exsrc/VTK_BUILD_ANSWERS.core
deleted file mode 100644
index d20aa1e611742267a45eea181367b9223027a842..0000000000000000000000000000000000000000
--- a/exsrc/VTK_BUILD_ANSWERS.core
+++ /dev/null
@@ -1,1320 +0,0 @@
-# This is the CMakeCache file.
-# For build in directory: CDAT_PREFIX/VTK
-# You can edit this file to change values found and used by cmake.
-# If you do not want to change any of the values, simply exit the editor.
-# If you do want to change a value, simply edit, save, and exit the editor.
-# The syntax for the file is as follows:
-# KEY:TYPE=VALUE
-# KEY is the name of a variable in the cache.
-# TYPE is a hint to GUI's for the type of VALUE, DO NOT EDIT TYPE!.
-# VALUE is the current value for the KEY.
-
-########################
-# EXTERNAL cache entries
-########################
-
-//Build the documentation (Doxygen).
-BUILD_DOCUMENTATION:BOOL=OFF
-
-//Build VTK examples.
-BUILD_EXAMPLES:BOOL=OFF
-
-//Build VTK with shared libraries.
-BUILD_SHARED_LIBS:BOOL=ON
-
-//Build the testing tree.
-BUILD_TESTING:BOOL=OFF
-
-//Path to a program.
-CMAKE_AR:FILEPATH=/usr/bin/ar
-
-//For backwards compatibility, what version of CMake commands and
-// syntax should this version of CMake allow.
-CMAKE_BACKWARDS_COMPATIBILITY:STRING=2.0
-
-//Choose the type of build, options are: None(CMAKE_CXX_FLAGS or
-// CMAKE_C_FLAGS used) Debug Release RelWithDebInfo MinSizeRel.
-//
-CMAKE_BUILD_TYPE:STRING=
-
-//C++ compiler
-CMAKE_CXX_COMPILER:STRING=c++
-
-//Flags used by the compiler during all build types.
-CMAKE_CXX_FLAGS:STRING=
-
-//Flags used by the compiler during debug builds.
-CMAKE_CXX_FLAGS_DEBUG:STRING=-g
-
-//Flags used by the compiler during release minsize builds.
-CMAKE_CXX_FLAGS_MINSIZEREL:STRING=-Os
-
-//Flags used by the compiler during release builds (/MD /Ob1 /Oi
-// /Ot /Oy /Gs will produce slightly less optimized but smaller
-// files).
-CMAKE_CXX_FLAGS_RELEASE:STRING=-O3
-
-//Flags used by the compiler during Release with Debug Info builds.
-//
-CMAKE_CXX_FLAGS_RELWITHDEBINFO:STRING=-O2 -g
-
-//C compiler
-CMAKE_C_COMPILER:STRING=gcc
-
-//Flags for C compiler.
-CMAKE_C_FLAGS:STRING=
-
-//Flags used by the compiler during debug builds.
-CMAKE_C_FLAGS_DEBUG:STRING=-g
-
-//Flags used by the compiler during release minsize builds.
-CMAKE_C_FLAGS_MINSIZEREL:STRING=-Os
-
-//Flags used by the compiler during release builds (/MD /Ob1 /Oi
-// /Ot /Oy /Gs will produce slightly less optimized but smaller
-// files).
-CMAKE_C_FLAGS_RELEASE:STRING=-O3
-
-//Flags used by the compiler during Release with Debug Info builds.
-//
-CMAKE_C_FLAGS_RELWITHDEBINFO:STRING=-O2 -g
-
-//Flags used by the linker.
-CMAKE_EXE_LINKER_FLAGS:STRING=
-
-//Flags used by the linker during debug builds.
-CMAKE_EXE_LINKER_FLAGS_DEBUG:STRING=
-
-//Flags used by the linker during release minsize builds.
-CMAKE_EXE_LINKER_FLAGS_MINSIZEREL:STRING=
-
-//Flags used by the linker during release builds.
-CMAKE_EXE_LINKER_FLAGS_RELEASE:STRING=
-
-//Flags used by the linker during Release with Debug Info builds.
-//
-CMAKE_EXE_LINKER_FLAGS_RELWITHDEBINFO:STRING=
-
-//Use HP pthreads.
-CMAKE_HP_PTHREADS:BOOL=OFF
-
-//Install path prefix, prepended onto install directories.
-CMAKE_INSTALL_PREFIX:PATH=CDAT_PREFIX
-
-//Path to a program.
-CMAKE_MAKE_PROGRAM:FILEPATH=/usr/bin/gmake
-
-//Flags used by the linker during the creation of modules.
-CMAKE_MODULE_LINKER_FLAGS:STRING=
-
-//Flags used by the linker during debug builds.
-CMAKE_MODULE_LINKER_FLAGS_DEBUG:STRING=
-
-//Flags used by the linker during release minsize builds.
-CMAKE_MODULE_LINKER_FLAGS_MINSIZEREL:STRING=
-
-//Flags used by the linker during release builds.
-CMAKE_MODULE_LINKER_FLAGS_RELEASE:STRING=
-
-//Flags used by the linker during Release with Debug Info builds.
-//
-CMAKE_MODULE_LINKER_FLAGS_RELWITHDEBINFO:STRING=
-
-//Path to a program.
-CMAKE_RANLIB:FILEPATH=/usr/bin/ranlib
-
-//Flags used by the linker during the creation of dll's.
-CMAKE_SHARED_LINKER_FLAGS:STRING=
-
-//Flags used by the linker during debug builds.
-CMAKE_SHARED_LINKER_FLAGS_DEBUG:STRING=
-
-//Flags used by the linker during release minsize builds.
-CMAKE_SHARED_LINKER_FLAGS_MINSIZEREL:STRING=
-
-//Flags used by the linker during release builds.
-CMAKE_SHARED_LINKER_FLAGS_RELEASE:STRING=
-
-//Flags used by the linker during Release with Debug Info builds.
-//
-CMAKE_SHARED_LINKER_FLAGS_RELWITHDEBINFO:STRING=
-
-//Thread library used.
-CMAKE_THREAD_LIBS:STRING=-lpthread
-
-//Use the pthreads library.
-CMAKE_USE_PTHREADS:BOOL=ON
-
-//If true, cmake will use relative paths in makefiles and projects.
-//
-CMAKE_USE_RELATIVE_PATHS:BOOL=OFF
-
-//Use sproc libs.
-CMAKE_USE_SPROC:BOOL=OFF
-
-//Use the win32 thread library.
-CMAKE_USE_WIN32_THREADS:BOOL=OFF
-
-//If this value is on, makefiles will be generated without the
-// .SILENT directive, and all commands will be echoed to the console
-// during the make.  This is useful for debugging only. With Visual
-// Studio IDE projects all commands are done without /nologo.
-CMAKE_VERBOSE_MAKEFILE:BOOL=OFF
-
-//X11 extra flags.
-CMAKE_X_CFLAGS:STRING=-I/usr/X11R6/include
-
-//Libraries and options used in X11 programs.
-CMAKE_X_LIBS:STRING=-lSM;-lICE;-lSM;-lICE;/usr/X11R6/lib/libX11.so;/usr/X11R6/lib/libXext.so;/usr/X11R6/lib/libX11.so;/usr/X11R6/lib/libXext.so
-
-//Path to program used to compress files for transfer to the dart
-// server
-COMPRESSIONCOMMAND:FILEPATH=/usr/bin/gzip
-
-//Path to the coverage program that Dart client uses for performing
-// coverage inspection
-COVERAGE_COMMAND:FILEPATH=/usr/bin/gcov
-
-//Path to a program.
-CVSCOMMAND:FILEPATH=/usr/bin/cvs
-
-//Options passed to the cvs update command.
-CVS_UPDATE_OPTIONS:STRING=-d -A -P
-
-//Limit of reported errors, -1 reports all.
-DART_BUILD_ERROR_REPORT_LIMIT:BOOL=OFF
-
-//Limit of reported warnings, -1 reports all.
-DART_BUILD_WARNING_REPORT_LIMIT:BOOL=OFF
-
-//If you have Dart installed, where is it located?
-DART_ROOT:PATH=DART_ROOT-NOTFOUND
-
-//Time allotted for a test before Dart will kill the test.
-DART_TESTING_TIMEOUT:STRING=1500
-
-//Show the actual output of the build, or if off show a . for each
-// 1024 bytes.
-DART_VERBOSE_BUILD:BOOL=OFF
-
-//Should Dart server send email when build errors are found in
-// Continuous builds?
-DELIVER_CONTINUOUS_EMAIL:BOOL=OFF
-
-//Value Computed by CMake
-DICOMParser_BINARY_DIR:STATIC=CDAT_PREFIX/VTK/Utilities/DICOMParser
-
-//Value Computed by CMake
-DICOMParser_SOURCE_DIR:STATIC=CDAT_BUILD_DIR/VTK/VTK/Utilities/DICOMParser
-
-//Path to gunzip executable
-GUNZIPCOMMAND:FILEPATH=/usr/bin/gunzip
-
-//Path to java command, used by the Dart server to create html.
-//
-JAVACOMMAND:FILEPATH=/usr/bin/java
-
-//Command used to build entire project from the command line.
-MAKECOMMAND:STRING=/usr/bin/gmake -i
-
-//Path to Rational purify command, used for memory error detection.
-//
-MEMORYCHECK_COMMAND:FILEPATH=MEMORYCHECK_COMMAND-NOTFOUND
-
-//File that contains suppressions for the memory checker
-MEMORYCHECK_SUPPRESSIONS_FILE:FILEPATH=
-
-//What is the path where the file GL/gl.h can be found
-OPENGL_INCLUDE_DIR:PATH=/usr/share/doc/NVIDIA_GLX-1.0/include
-
-//Where can one of the MesaGL or GL libraries be found
-OPENGL_gl_LIBRARY:FILEPATH=/usr/lib/libGL.so
-
-//Where can one of the MesaGLU or GLU libraries be found
-OPENGL_glu_LIBRARY:FILEPATH=/usr/lib/libGLU.so
-
-//What is the path where the file GL/xmesa.h can be found
-OPENGL_xmesa_INCLUDE_DIR:PATH=OPENGL_xmesa_INCLUDE_DIR-NOTFOUND
-
-//Path to a program.
-PYTHON_EXECUTABLE:FILEPATH=CDAT_PREFIX/bin/python
-
-//What is the path where the file Python.h can be found
-PYTHON_INCLUDE_PATH:PATH=CDAT_PREFIX/include/pythonPY_VERSION
-
-//Where can one of the python23, python2.3, python2.3.dll, python22,
-// python2.2, python2.2.dll, python21, python2.1, python2.1.dll,
-// python20, python2.0, python2.0.dll, python16, python1.6, python1.6.dll,
-// python15, python1.5 or python1.5.dll libraries be found
-PYTHON_LIBRARY:FILEPATH=CDAT_PREFIX/lib/pythonPY_VERSION/config/libpythonPY_VERSION.a
-
-//Utility library needed for vtkpython
-PYTHON_UTIL_LIBRARY:FILEPATH=/usr/lib/libutil.so
-
-//Path to scp command, used by some Dart clients for submitting
-// results to a Dart server (when not using ftp for submissions)
-//
-SCPCOMMAND:FILEPATH=/usr/bin/scp
-
-//Name of the computer/site where compile is being run
-SITE:STRING=
-
-//What is the path where the file tcl.h can be found
-TCL_INCLUDE_PATH:PATH=CDAT_PREFIX/include
-
-//Where can one of the tcl, tcl84, tcl8.4, tcl83, tcl8.3, tcl82,
-// tcl8.2, tcl80 or tcl8.0 libraries be found
-TCL_LIBRARY:FILEPATH=CDAT_PREFIX/lib/libtclTCLTK_VERSION.a
-
-//Path to a program.
-TCL_TCLSH:FILEPATH=CDAT_PREFIX/bin/tclshTCLTK_VERSION
-
-//What is the path where the file tk.h can be found
-TK_INCLUDE_PATH:PATH=CDAT_PREFIX/include
-
-//Where can one of the tk, tk84, tk8.4, tk83, tk8.3, tk82, tk8.2,
-// tk80 or tk8.0 libraries be found
-TK_LIBRARY:FILEPATH=CDAT_PREFIX/lib/libtkTCLTK_VERSION.a
-
-//Value Computed by CMake
-VTKEXPAT_BINARY_DIR:STATIC=CDAT_PREFIX/VTK/Utilities/vtkexpat
-
-//Value Computed by CMake
-VTKEXPAT_SOURCE_DIR:STATIC=CDAT_BUILD_DIR/VTK/VTK/Utilities/vtkexpat
-
-//Value Computed by CMake
-VTKFREETYPE_BINARY_DIR:STATIC=CDAT_PREFIX/VTK/Utilities/vtkfreetype
-
-//Value Computed by CMake
-VTKFREETYPE_SOURCE_DIR:STATIC=CDAT_BUILD_DIR/VTK/VTK/Utilities/vtkfreetype
-
-//Value Computed by CMake
-VTKFTGL_BINARY_DIR:STATIC=CDAT_PREFIX/VTK/Utilities/ftgl
-
-//Value Computed by CMake
-VTKFTGL_SOURCE_DIR:STATIC=CDAT_BUILD_DIR/VTK/VTK/Utilities/ftgl
-
-//Value Computed by CMake
-VTKJPEG_BINARY_DIR:STATIC=CDAT_PREFIX/VTK/Utilities/vtkjpeg
-
-//Value Computed by CMake
-VTKJPEG_SOURCE_DIR:STATIC=CDAT_BUILD_DIR/VTK/VTK/Utilities/vtkjpeg
-
-//Value Computed by CMake
-VTKNETCDF_BINARY_DIR:STATIC=CDAT_PREFIX/VTK/Utilities/vtknetcdf
-
-//Value Computed by CMake
-VTKNETCDF_SOURCE_DIR:STATIC=CDAT_BUILD_DIR/VTK/VTK/Utilities/vtknetcdf
-
-//Value Computed by CMake
-VTKPNG_BINARY_DIR:STATIC=CDAT_PREFIX/VTK/Utilities/vtkpng
-
-//Value Computed by CMake
-VTKPNG_SOURCE_DIR:STATIC=CDAT_BUILD_DIR/VTK/VTK/Utilities/vtkpng
-
-//Value Computed by CMake
-VTKTIFF_BINARY_DIR:STATIC=CDAT_PREFIX/VTK/Utilities/vtktiff
-
-//Value Computed by CMake
-VTKTIFF_SOURCE_DIR:STATIC=CDAT_BUILD_DIR/VTK/VTK/Utilities/vtktiff
-
-//Value Computed by CMake
-VTKZLIB_BINARY_DIR:STATIC=CDAT_PREFIX/VTK/Utilities/vtkzlib
-
-//Value Computed by CMake
-VTKZLIB_SOURCE_DIR:STATIC=CDAT_BUILD_DIR/VTK/VTK/Utilities/vtkzlib
-
-//Value Computed by CMake
-VTK_BINARY_DIR:STATIC=CDAT_PREFIX/VTK
-
-//What is the path where the file VTKData.readme can be found
-VTK_DATA_ROOT:PATH=CDAT_BUILD_DIR/VTK/VTKData
-
-//Build leak checking support into VTK.
-VTK_DEBUG_LEAKS:BOOL=OFF
-
-//Location of the OpenGL extensions header file (glext.h).
-VTK_GLEXT_FILE:FILEPATH=CDAT_BUILD_DIR/VTK/VTK/Utilities/ParseOGLExt/headers/glext.h
-
-//Location of the GLX extensions header file (glxext.h).
-VTK_GLXEXT_FILE:FILEPATH=CDAT_BUILD_DIR/VTK/VTK/Utilities/ParseOGLExt/headers/glxext.h
-
-//Remove all legacy code completely.
-VTK_LEGACY_REMOVE:BOOL=OFF
-
-//Silence all legacy code messages.
-VTK_LEGACY_SILENT:BOOL=OFF
-
-//The opengl library being used supports off screen Mesa calls.
-//
-VTK_OPENGL_HAS_OSMESA:BOOL=OFF
-
-//Value Computed by CMake
-VTK_SOURCE_DIR:STATIC=CDAT_BUILD_DIR/VTK/VTK
-
-//Build with static Tcl/Tk support. TCL_LIBRARY and TK_LIBRARY
-// must point to the corresponding Tcl/Tk static libraries (example,
-// tcl84sx.lib, tk84sx.lib).
-VTK_TCL_TK_STATIC:BOOL=ON
-
-//Build VTK with 64 bit ids
-VTK_USE_64BIT_IDS:BOOL=OFF
-
-//Use the ANSI standard iostream library.
-VTK_USE_ANSI_STDLIB:BOOL=ON
-
-//Turn this option off and tests will not pop up windows
-VTK_USE_DISPLAY:BOOL=ON
-
-//Build VTK with gl2ps support.
-VTK_USE_GL2PS:BOOL=ON
-
-//Build VTK with GUI Support
-VTK_USE_GUISUPPORT:BOOL=OFF
-
-//Use mangled Mesa with OpenGL.
-VTK_USE_MANGLED_MESA:BOOL=OFF
-
-//Build the vtkParallel kit.
-VTK_USE_PARALLEL:BOOL=OFF
-
-//Build the vtkRendering kit.  Needed for displaying data or using
-// widgets.
-VTK_USE_RENDERING:BOOL=ON
-
-//Build shared libraries with rpath.  This makes it easy to run
-// executables from the build tree when using shared libraries,
-// but removes install support.
-VTK_USE_RPATH:BOOL=ON
-
-//Use the system's expat library.
-VTK_USE_SYSTEM_EXPAT:BOOL=OFF
-
-//Use the system's freetype library.
-VTK_USE_SYSTEM_FREETYPE:BOOL=OFF
-
-//Use the system's jpeg library.
-VTK_USE_SYSTEM_JPEG:BOOL=OFF
-
-//Use the system's png library.
-VTK_USE_SYSTEM_PNG:BOOL=OFF
-
-//Use the system's tiff library.
-VTK_USE_SYSTEM_TIFF:BOOL=OFF
-
-//Use the system's zlib library.
-VTK_USE_SYSTEM_ZLIB:BOOL=OFF
-
-//Location of the WGL extensions header file (wglext.h).
-VTK_WGLEXT_FILE:FILEPATH=CDAT_BUILD_DIR/VTK/VTK/Utilities/ParseOGLExt/headers/wglext.h
-
-//Where can the hints file be found
-VTK_WRAP_HINTS:FILEPATH=CDAT_BUILD_DIR/VTK/VTK/Wrapping/hints
-
-//Wrap VTK classes into the Java language.
-VTK_WRAP_JAVA:BOOL=OFF
-
-//Wrap VTK classes into the Python language.
-VTK_WRAP_PYTHON:BOOL=ON
-
-//Path to an internal program.
-VTK_WRAP_PYTHON_EXE:FILEPATH=CDAT_PREFIX/VTK/bin/vtkWrapPython
-
-//Path to an internal program.
-VTK_WRAP_PYTHON_INIT_EXE:FILEPATH=CDAT_PREFIX/VTK/bin/vtkWrapPythonInit
-
-//Wrap VTK classes into the TCL language.
-VTK_WRAP_TCL:BOOL=ON
-
-//Path to an internal program.
-VTK_WRAP_TCL_EXE:FILEPATH=CDAT_PREFIX/VTK/bin/vtkWrapTcl
-
-//Path to an internal program.
-VTK_WRAP_TCL_INIT_EXE:FILEPATH=CDAT_PREFIX/VTK/bin/vtkWrapTclInit
-
-//What is the path where the file X11/X.h can be found
-X11_X11_INCLUDE_PATH:PATH=/usr/X11R6/include
-
-//Where can the X11 library be found
-X11_X11_LIB:FILEPATH=/usr/X11R6/lib/libX11.so
-
-//Where can the Xext library be found
-X11_Xext_LIB:FILEPATH=/usr/X11R6/lib/libXext.so
-
-//What is the path where the file X11/Xlib.h can be found
-X11_Xlib_INCLUDE_PATH:PATH=/usr/X11R6/include
-
-//What is the path where the file X11/Xutil.h can be found
-X11_Xutil_INCLUDE_PATH:PATH=/usr/X11R6/include
-
-//Dependencies for the target
-vtkCommonPython_LIB_DEPENDS:STATIC=vtkCommon;
-
-//Dependencies for the target
-vtkCommonTCL_LIB_DEPENDS:STATIC=vtkCommon;CDAT_PREFIX/lib/libtclTCLTK_VERSION.a;m;
-
-//Dependencies for the target
-vtkCommon_LIB_DEPENDS:STATIC=-lpthread;-ldl;-lm;
-
-//Dependencies for target
-vtkDICOMParser_LIB_DEPENDS:STATIC=
-
-//Value Computed by CMake
-vtkExodus2_BINARY_DIR:STATIC=CDAT_PREFIX/VTK/Utilities/vtkexodus2
-
-//Value Computed by CMake
-vtkExodus2_SOURCE_DIR:STATIC=CDAT_BUILD_DIR/VTK/VTK/Utilities/vtkexodus2
-
-//Dependencies for the target
-vtkFilteringPython_LIB_DEPENDS:STATIC=vtkFiltering;vtkCommonPython;
-
-//Dependencies for the target
-vtkFilteringTCL_LIB_DEPENDS:STATIC=vtkFiltering;vtkCommonTCL;
-
-//Dependencies for the target
-vtkFiltering_LIB_DEPENDS:STATIC=vtkCommon;
-
-//Dependencies for the target
-vtkGenericFilteringPython_LIB_DEPENDS:STATIC=vtkGenericFiltering;vtkFilteringPython;vtkGraphicsPython;
-
-//Dependencies for the target
-vtkGenericFilteringTCL_LIB_DEPENDS:STATIC=vtkGenericFiltering;vtkFilteringTCL;vtkGraphicsTCL;
-
-//Dependencies for the target
-vtkGenericFiltering_LIB_DEPENDS:STATIC=vtkFiltering;vtkGraphics;
-
-//Dependencies for the target
-vtkGraphicsPython_LIB_DEPENDS:STATIC=vtkGraphics;vtkFilteringPython;
-
-//Dependencies for the target
-vtkGraphicsTCL_LIB_DEPENDS:STATIC=vtkGraphics;vtkFilteringTCL;
-
-//Dependencies for the target
-vtkGraphics_LIB_DEPENDS:STATIC=vtkFiltering;
-
-//Dependencies for the target
-vtkHybridPython_LIB_DEPENDS:STATIC=vtkHybrid;vtkRenderingPython;vtkIOPython;
-
-//Dependencies for the target
-vtkHybridTCL_LIB_DEPENDS:STATIC=vtkHybrid;vtkRenderingTCL;vtkIOTCL;
-
-//Dependencies for the target
-vtkHybrid_LIB_DEPENDS:STATIC=vtkRendering;vtkIO;vtkexoIIc;
-
-//Dependencies for the target
-vtkIOPython_LIB_DEPENDS:STATIC=vtkIO;vtkFilteringPython;
-
-//Dependencies for the target
-vtkIOTCL_LIB_DEPENDS:STATIC=vtkIO;vtkFilteringTCL;
-
-//Dependencies for the target
-vtkIO_LIB_DEPENDS:STATIC=vtkFiltering;vtkDICOMParser;vtkpng;vtkzlib;vtkjpeg;vtktiff;vtkexpat;
-
-//Dependencies for the target
-vtkImagingPython_LIB_DEPENDS:STATIC=vtkImaging;vtkFilteringPython;
-
-//Dependencies for the target
-vtkImagingTCL_LIB_DEPENDS:STATIC=vtkImaging;vtkFilteringTCL;
-
-//Dependencies for the target
-vtkImaging_LIB_DEPENDS:STATIC=vtkFiltering;
-
-//Dependencies for target
-vtkNetCDF_LIB_DEPENDS:STATIC=
-
-//Dependencies for the target
-vtkRenderingPythonTkWidgets_LIB_DEPENDS:STATIC=vtkRendering;CDAT_PREFIX/lib/libtkTCLTK_VERSION.a;CDAT_PREFIX/lib/libtclTCLTK_VERSION.a;m;
-
-//Dependencies for the target
-vtkRenderingPython_LIB_DEPENDS:STATIC=vtkRendering;vtkGraphicsPython;vtkImagingPython;
-
-//Dependencies for the target
-vtkRenderingTCL_LIB_DEPENDS:STATIC=vtkRendering;vtkGraphicsTCL;vtkImagingTCL;CDAT_PREFIX/lib/libtkTCLTK_VERSION.a;CDAT_PREFIX/lib/libtclTCLTK_VERSION.a;m;
-
-//Dependencies for the target
-vtkRendering_LIB_DEPENDS:STATIC=vtkGraphics;vtkImaging;vtkIO;vtkftgl;vtkfreetype;vtkzlib;/usr/lib/libGL.so;-lXt;-lSM;-lICE;-lSM;-lICE;-lSM;-lICE;/usr/X11R6/lib/libX11.so;/usr/X11R6/lib/libXext.so;/usr/X11R6/lib/libX11.so;/usr/X11R6/lib/libXext.so;/usr/X11R6/lib/libX11.so;/usr/X11R6/lib/libXext.so;
-
-//Dependencies for the target
-vtkVolumeRenderingPython_LIB_DEPENDS:STATIC=vtkVolumeRendering;vtkRenderingPython;vtkIOPython;
-
-//Dependencies for the target
-vtkVolumeRenderingTCL_LIB_DEPENDS:STATIC=vtkVolumeRendering;vtkRenderingTCL;vtkIOTCL;
-
-//Dependencies for the target
-vtkVolumeRendering_LIB_DEPENDS:STATIC=vtkRendering;vtkIO;
-
-//Dependencies for the target
-vtkWidgetsPython_LIB_DEPENDS:STATIC=vtkWidgets;vtkRenderingPython;vtkHybridPython;
-
-//Dependencies for the target
-vtkWidgetsTCL_LIB_DEPENDS:STATIC=vtkWidgets;vtkRenderingTCL;vtkHybridTCL;
-
-//Dependencies for the target
-vtkWidgets_LIB_DEPENDS:STATIC=vtkRendering;vtkHybrid;
-
-//Dependencies for the target
-vtkexoIIc_LIB_DEPENDS:STATIC=vtkNetCDF;
-
-//Dependencies for target
-vtkexpat_LIB_DEPENDS:STATIC=
-
-//Dependencies for target
-vtkfreetype_LIB_DEPENDS:STATIC=
-
-//Dependencies for the target
-vtkftgl_LIB_DEPENDS:STATIC=/usr/lib/libGL.so;vtkfreetype;
-
-//Dependencies for target
-vtkjpeg_LIB_DEPENDS:STATIC=
-
-//Dependencies for the target
-vtkpng_LIB_DEPENDS:STATIC=vtkzlib;
-
-//Value Computed by CMake
-vtksys_BINARY_DIR:STATIC=CDAT_PREFIX/VTK/Utilities/kwsys
-
-//Dependencies for target
-vtksys_LIB_DEPENDS:STATIC=
-
-//Value Computed by CMake
-vtksys_SOURCE_DIR:STATIC=CDAT_BUILD_DIR/VTK/VTK/Utilities/kwsys
-
-//Dependencies for the target
-vtktiff_LIB_DEPENDS:STATIC=vtkzlib;vtkjpeg;
-
-//Dependencies for target
-vtkzlib_LIB_DEPENDS:STATIC=
-
-
-########################
-# INTERNAL cache entries
-########################
-
-//Executable to project name.
-CDAT_PREFIX/VTK/bin/vtkWrapPython:INTERNAL=vtkWrapPython
-//Executable to project name.
-CDAT_PREFIX/VTK/bin/vtkWrapPythonInit:INTERNAL=vtkWrapPythonInit
-//Executable to project name.
-CDAT_PREFIX/VTK/bin/vtkWrapTcl:INTERNAL=vtkWrapTcl
-//Executable to project name.
-CDAT_PREFIX/VTK/bin/vtkWrapTclInit:INTERNAL=vtkWrapTclInit
-//Advanced flag for variable: BUILD_DOCUMENTATION
-BUILD_DOCUMENTATION-ADVANCED:INTERNAL=1
-//Advanced flag for variable: BUILD_TESTING
-BUILD_TESTING-ADVANCED:INTERNAL=1
-//Result of TRY_COMPILE
-CMAKE_ANSI_FOR_SCOPE:INTERNAL=TRUE
-//Have include iostream
-CMAKE_ANSI_STREAM_HEADERS:INTERNAL=1
-//Advanced flag for variable: CMAKE_AR
-CMAKE_AR-ADVANCED:INTERNAL=1
-//Advanced flag for variable: CMAKE_BUILD_TOOL
-CMAKE_BUILD_TOOL-ADVANCED:INTERNAL=1
-//What is the target build tool cmake is generating for.
-CMAKE_BUILD_TOOL:INTERNAL=/usr/bin/gmake
-//This is the directory where this CMakeCache.txt was created
-CMAKE_CACHEFILE_DIR:INTERNAL=CDAT_PREFIX/VTK
-//Major version of cmake used to create the current loaded cache
-//
-CMAKE_CACHE_MAJOR_VERSION:INTERNAL=2
-//Minor version of cmake used to create the current loaded cache
-//
-CMAKE_CACHE_MINOR_VERSION:INTERNAL=0
-//Release version of cmake used to create the current loaded cache
-//
-CMAKE_CACHE_RELEASE_VERSION:INTERNAL=patch 6
-//Path to CMake executable.
-CMAKE_COMMAND:INTERNAL=CDAT_PREFIX/bin/cmake
-//Advanced flag for variable: CMAKE_CTEST_COMMAND
-CMAKE_CTEST_COMMAND-ADVANCED:INTERNAL=1
-//Path to ctest program executable.
-CMAKE_CTEST_COMMAND:INTERNAL=CDAT_PREFIX/bin/ctest
-//Advanced flag for variable: CMAKE_CXX_COMPILER
-CMAKE_CXX_COMPILER-ADVANCED:INTERNAL=1
-//full path to the compiler cmake found
-CMAKE_CXX_COMPILER_FULLPATH:INTERNAL=/usr/bin/c++
-//Result of TRY_COMPILE
-CMAKE_CXX_COMPILER_WORKS:INTERNAL=TRUE
-//Advanced flag for variable: CMAKE_CXX_FLAGS
-CMAKE_CXX_FLAGS-ADVANCED:INTERNAL=1
-//Advanced flag for variable: CMAKE_CXX_FLAGS_DEBUG
-CMAKE_CXX_FLAGS_DEBUG-ADVANCED:INTERNAL=1
-//Advanced flag for variable: CMAKE_CXX_FLAGS_MINSIZEREL
-CMAKE_CXX_FLAGS_MINSIZEREL-ADVANCED:INTERNAL=1
-//Advanced flag for variable: CMAKE_CXX_FLAGS_RELEASE
-CMAKE_CXX_FLAGS_RELEASE-ADVANCED:INTERNAL=1
-//Advanced flag for variable: CMAKE_CXX_FLAGS_RELWITHDEBINFO
-CMAKE_CXX_FLAGS_RELWITHDEBINFO-ADVANCED:INTERNAL=1
-//Advanced flag for variable: CMAKE_C_COMPILER
-CMAKE_C_COMPILER-ADVANCED:INTERNAL=1
-//full path to the compiler cmake found
-CMAKE_C_COMPILER_FULLPATH:INTERNAL=/usr/bin/gcc
-//Result of TRY_COMPILE
-CMAKE_C_COMPILER_WORKS:INTERNAL=TRUE
-//Advanced flag for variable: CMAKE_C_FLAGS
-CMAKE_C_FLAGS-ADVANCED:INTERNAL=1
-//Advanced flag for variable: CMAKE_C_FLAGS_DEBUG
-CMAKE_C_FLAGS_DEBUG-ADVANCED:INTERNAL=1
-//Advanced flag for variable: CMAKE_C_FLAGS_MINSIZEREL
-CMAKE_C_FLAGS_MINSIZEREL-ADVANCED:INTERNAL=1
-//Advanced flag for variable: CMAKE_C_FLAGS_RELEASE
-CMAKE_C_FLAGS_RELEASE-ADVANCED:INTERNAL=1
-//Advanced flag for variable: CMAKE_C_FLAGS_RELWITHDEBINFO
-CMAKE_C_FLAGS_RELWITHDEBINFO-ADVANCED:INTERNAL=1
-//Path to cache edit program executable.
-CMAKE_EDIT_COMMAND:INTERNAL=CDAT_PREFIX/bin/ccmake
-//Advanced flag for variable: CMAKE_EXE_LINKER_FLAGS
-CMAKE_EXE_LINKER_FLAGS-ADVANCED:INTERNAL=1
-//Advanced flag for variable: CMAKE_EXE_LINKER_FLAGS_DEBUG
-CMAKE_EXE_LINKER_FLAGS_DEBUG-ADVANCED:INTERNAL=1
-//Advanced flag for variable: CMAKE_EXE_LINKER_FLAGS_MINSIZEREL
-//
-CMAKE_EXE_LINKER_FLAGS_MINSIZEREL-ADVANCED:INTERNAL=1
-//Advanced flag for variable: CMAKE_EXE_LINKER_FLAGS_RELEASE
-CMAKE_EXE_LINKER_FLAGS_RELEASE-ADVANCED:INTERNAL=1
-//Advanced flag for variable: CMAKE_EXE_LINKER_FLAGS_RELWITHDEBINFO
-//
-CMAKE_EXE_LINKER_FLAGS_RELWITHDEBINFO-ADVANCED:INTERNAL=1
-//Name of generator.
-CMAKE_GENERATOR:INTERNAL=Unix Makefiles
-//Have include sstream
-CMAKE_HAS_ANSI_STRING_STREAM:INTERNAL=1
-//Is X11 around.
-CMAKE_HAS_X:INTERNAL=1
-//Have function connect
-CMAKE_HAVE_CONNECT:INTERNAL=1
-//Have function gethostbyname
-CMAKE_HAVE_GETHOSTBYNAME:INTERNAL=1
-//Have include limits.h
-CMAKE_HAVE_LIMITS_H:INTERNAL=1
-//Have library pthreads
-CMAKE_HAVE_PTHREADS_CREATE:INTERNAL=
-//Have library pthread
-CMAKE_HAVE_PTHREAD_CREATE:INTERNAL=1
-//Have include pthread.h
-CMAKE_HAVE_PTHREAD_H:INTERNAL=1
-//Have function remove
-CMAKE_HAVE_REMOVE:INTERNAL=1
-//Have function shmat
-CMAKE_HAVE_SHMAT:INTERNAL=1
-//Have include sys/prctl.h
-CMAKE_HAVE_SYS_PRCTL_H:INTERNAL=1
-//Have include unistd.h
-CMAKE_HAVE_UNISTD_H:INTERNAL=1
-//Start directory with the top level CMakeLists.txt file for this
-// project
-CMAKE_HOME_DIRECTORY:INTERNAL=CDAT_BUILD_DIR/VTK/VTK
-//Advanced flag for variable: CMAKE_HP_PTHREADS
-CMAKE_HP_PTHREADS-ADVANCED:INTERNAL=1
-//Have library ICE
-CMAKE_LIB_ICE_HAS_ICECONNECTIONNUMBER:INTERNAL=1
-//Advanced flag for variable: CMAKE_MAKE_PROGRAM
-CMAKE_MAKE_PROGRAM-ADVANCED:INTERNAL=1
-//Advanced flag for variable: CMAKE_MODULE_LINKER_FLAGS
-CMAKE_MODULE_LINKER_FLAGS-ADVANCED:INTERNAL=1
-//Advanced flag for variable: CMAKE_MODULE_LINKER_FLAGS_DEBUG
-CMAKE_MODULE_LINKER_FLAGS_DEBUG-ADVANCED:INTERNAL=1
-//Advanced flag for variable: CMAKE_MODULE_LINKER_FLAGS_MINSIZEREL
-//
-CMAKE_MODULE_LINKER_FLAGS_MINSIZEREL-ADVANCED:INTERNAL=1
-//Advanced flag for variable: CMAKE_MODULE_LINKER_FLAGS_RELEASE
-//
-CMAKE_MODULE_LINKER_FLAGS_RELEASE-ADVANCED:INTERNAL=1
-//Advanced flag for variable: CMAKE_MODULE_LINKER_FLAGS_RELWITHDEBINFO
-//
-CMAKE_MODULE_LINKER_FLAGS_RELWITHDEBINFO-ADVANCED:INTERNAL=1
-//Does the compiler support ansi for scope.
-CMAKE_NO_ANSI_FOR_SCOPE:INTERNAL=0
-//Advanced flag for variable: CMAKE_NO_ANSI_STREAM_HEADERS
-CMAKE_NO_ANSI_STREAM_HEADERS-ADVANCED:INTERNAL=1
-//Does the compiler support headers like iostream.
-CMAKE_NO_ANSI_STREAM_HEADERS:INTERNAL=0
-//Does the compiler support std::.
-CMAKE_NO_STD_NAMESPACE:INTERNAL=0
-//Advanced flag for variable: CMAKE_RANLIB
-CMAKE_RANLIB-ADVANCED:INTERNAL=1
-//Path to CMake installation.
-CMAKE_ROOT:INTERNAL=CDAT_PREFIX/share/CMake
-//Advanced flag for variable: CMAKE_SHARED_LINKER_FLAGS
-CMAKE_SHARED_LINKER_FLAGS-ADVANCED:INTERNAL=1
-//Advanced flag for variable: CMAKE_SHARED_LINKER_FLAGS_DEBUG
-CMAKE_SHARED_LINKER_FLAGS_DEBUG-ADVANCED:INTERNAL=1
-//Advanced flag for variable: CMAKE_SHARED_LINKER_FLAGS_MINSIZEREL
-//
-CMAKE_SHARED_LINKER_FLAGS_MINSIZEREL-ADVANCED:INTERNAL=1
-//Advanced flag for variable: CMAKE_SHARED_LINKER_FLAGS_RELEASE
-//
-CMAKE_SHARED_LINKER_FLAGS_RELEASE-ADVANCED:INTERNAL=1
-//Advanced flag for variable: CMAKE_SHARED_LINKER_FLAGS_RELWITHDEBINFO
-//
-CMAKE_SHARED_LINKER_FLAGS_RELWITHDEBINFO-ADVANCED:INTERNAL=1
-//Result of TRY_RUN
-CMAKE_SIZEOF_CHAR:INTERNAL=1
-//Result of TRY_RUN
-CMAKE_SIZEOF_DOUBLE:INTERNAL=8
-//Result of TRY_RUN
-CMAKE_SIZEOF_FLOAT:INTERNAL=4
-//Result of TRY_RUN
-CMAKE_SIZEOF_INT:INTERNAL=4
-//Result of TRY_RUN
-CMAKE_SIZEOF_LONG:INTERNAL=4
-//Result of TRY_RUN
-CMAKE_SIZEOF_SHORT:INTERNAL=2
-//Result of TRY_RUN
-CMAKE_SIZEOF_VOID_P:INTERNAL=4
-//Advanced flag for variable: CMAKE_SKIP_RPATH
-CMAKE_SKIP_RPATH-ADVANCED:INTERNAL=1
-//Whether to build with rpath.
-CMAKE_SKIP_RPATH:INTERNAL=0
-//Result of TRY_COMPILE
-CMAKE_STD_NAMESPACE:INTERNAL=TRUE
-//Advanced flag for variable: CMAKE_THREAD_LIBS
-CMAKE_THREAD_LIBS-ADVANCED:INTERNAL=1
-//uname command
-CMAKE_UNAME:INTERNAL=/bin/uname
-//Advanced flag for variable: CMAKE_USE_PTHREADS
-CMAKE_USE_PTHREADS-ADVANCED:INTERNAL=1
-//Advanced flag for variable: CMAKE_USE_RELATIVE_PATHS
-CMAKE_USE_RELATIVE_PATHS-ADVANCED:INTERNAL=1
-//Advanced flag for variable: CMAKE_USE_SPROC
-CMAKE_USE_SPROC-ADVANCED:INTERNAL=1
-//Advanced flag for variable: CMAKE_USE_WIN32_THREADS
-CMAKE_USE_WIN32_THREADS-ADVANCED:INTERNAL=1
-//Advanced flag for variable: CMAKE_VERBOSE_MAKEFILE
-CMAKE_VERBOSE_MAKEFILE-ADVANCED:INTERNAL=1
-//Result of TRY_RUN
-CMAKE_WORDS_BIGENDIAN:INTERNAL=0
-//Advanced flag for variable: CMAKE_X_CFLAGS
-CMAKE_X_CFLAGS-ADVANCED:INTERNAL=1
-//Advanced flag for variable: CMAKE_X_LIBS
-CMAKE_X_LIBS-ADVANCED:INTERNAL=1
-//Advanced flag for variable: COMPRESSIONCOMMAND
-COMPRESSIONCOMMAND-ADVANCED:INTERNAL=1
-//Advanced flag for variable: COVERAGE_COMMAND
-COVERAGE_COMMAND-ADVANCED:INTERNAL=1
-//Advanced flag for variable: CVSCOMMAND
-CVSCOMMAND-ADVANCED:INTERNAL=1
-//Advanced flag for variable: CVS_UPDATE_OPTIONS
-CVS_UPDATE_OPTIONS-ADVANCED:INTERNAL=1
-//Path to an executable
-CommonCxxTests_CMAKE_PATH:INTERNAL=CDAT_PREFIX/VTK/Common/Testing/Cxx
-//Advanced flag for variable: DART_BUILD_ERROR_REPORT_LIMIT
-DART_BUILD_ERROR_REPORT_LIMIT-ADVANCED:INTERNAL=1
-//Advanced flag for variable: DART_BUILD_WARNING_REPORT_LIMIT
-DART_BUILD_WARNING_REPORT_LIMIT-ADVANCED:INTERNAL=1
-//Advanced flag for variable: DART_ROOT
-DART_ROOT-ADVANCED:INTERNAL=1
-//Advanced flag for variable: DART_TESTING_TIMEOUT
-DART_TESTING_TIMEOUT-ADVANCED:INTERNAL=1
-//Advanced flag for variable: DART_VERBOSE_BUILD
-DART_VERBOSE_BUILD-ADVANCED:INTERNAL=1
-//Advanced flag for variable: DELIVER_CONTINUOUS_EMAIL
-DELIVER_CONTINUOUS_EMAIL-ADVANCED:INTERNAL=1
-//Single output directory for building all executables.
-EXECUTABLE_OUTPUT_PATH:INTERNAL=CDAT_PREFIX/VTK/bin
-//Path to an executable
-FilteringCxxTests_CMAKE_PATH:INTERNAL=CDAT_PREFIX/VTK/Filtering/Testing/Cxx
-//Advanced flag for variable: GUNZIPCOMMAND
-GUNZIPCOMMAND-ADVANCED:INTERNAL=1
-//Path to an executable
-GenericFilteringCxxTests_CMAKE_PATH:INTERNAL=CDAT_PREFIX/VTK/GenericFiltering/Testing/Cxx
-//Path to an executable
-GraphicsCxxTests_CMAKE_PATH:INTERNAL=CDAT_PREFIX/VTK/Graphics/Testing/Cxx
-//Have symbol alloca
-HAVE_ALLOCA:INTERNAL=1
-//Have include HAVE_ALLOCA_H
-HAVE_ALLOCA_H:INTERNAL=1
-//Result of TRY_COMPILE
-HAVE_CMAKE_SIZEOF_CHAR:INTERNAL=TRUE
-//Result of TRY_COMPILE
-HAVE_CMAKE_SIZEOF_DOUBLE:INTERNAL=TRUE
-//Result of TRY_COMPILE
-HAVE_CMAKE_SIZEOF_FLOAT:INTERNAL=TRUE
-//Result of TRY_COMPILE
-HAVE_CMAKE_SIZEOF_INT:INTERNAL=TRUE
-//Result of TRY_COMPILE
-HAVE_CMAKE_SIZEOF_LONG:INTERNAL=TRUE
-//Result of TRY_COMPILE
-HAVE_CMAKE_SIZEOF_SHORT:INTERNAL=TRUE
-//Result of TRY_COMPILE
-HAVE_CMAKE_SIZEOF_VOID_P:INTERNAL=TRUE
-//Result of TRY_COMPILE
-HAVE_CMAKE_WORDS_BIGENDIAN:INTERNAL=TRUE
-//Have include fcntl.h
-HAVE_FCNTL_H:INTERNAL=1
-//NetCDF test 
-HAVE_FTRUNCATE:INTERNAL=1
-//Result of TRY_COMPILE
-HAVE_SIZEOF_DOUBLE:INTERNAL=TRUE
-//Result of TRY_COMPILE
-HAVE_SIZEOF_FLOAT:INTERNAL=TRUE
-//Result of TRY_COMPILE
-HAVE_SIZEOF_INT:INTERNAL=TRUE
-//Result of TRY_COMPILE
-HAVE_SIZEOF_LONG:INTERNAL=TRUE
-//Result of TRY_COMPILE
-HAVE_SIZEOF_OFF_T:INTERNAL=TRUE
-//Result of TRY_COMPILE
-HAVE_SIZEOF_PTRDIFF_T:INTERNAL=TRUE
-//Result of TRY_COMPILE
-HAVE_SIZEOF_SHORT:INTERNAL=TRUE
-//Result of TRY_COMPILE
-HAVE_SIZEOF_SIZE_T:INTERNAL=TRUE
-//Result of TRY_COMPILE
-HAVE_SIZEOF_SSIZE_T:INTERNAL=TRUE
-//Result of TRY_COMPILE
-HAVE_SIZEOF_UNSIGNED_CHAR:INTERNAL=TRUE
-//Have include HAVE_STDDEF_H
-HAVE_STDDEF_H:INTERNAL=1
-//Have include HAVE_STDINT_H
-HAVE_STDINT_H:INTERNAL=1
-//Have include HAVE_STDIO_H
-HAVE_STDIO_H:INTERNAL=1
-//Have include HAVE_STDLIB_H
-HAVE_STDLIB_H:INTERNAL=1
-//Have symbol strerror
-HAVE_STRERROR:INTERNAL=1
-//Have include HAVE_STRING_H
-HAVE_STRING_H:INTERNAL=1
-//NetCDF test 
-HAVE_ST_BLKSIZE:INTERNAL=1
-//Have include HAVE_SYS_STAT_H
-HAVE_SYS_STAT_H:INTERNAL=1
-//Have include HAVE_SYS_TYPES_H
-HAVE_SYS_TYPES_H:INTERNAL=1
-//Have include unistd.h
-HAVE_UNISTD_H:INTERNAL=1
-//Result of TRY_COMPILE
-HAVE_VTK_SIZEOF_LONG_LONG:INTERNAL=TRUE
-//Result of TRY_COMPILE
-HAVE_VTK_SIZEOF___INT64:INTERNAL=FALSE
-//Result of TRY_COMPILE
-HAVE_WORDS_BIGENDIAN:INTERNAL=TRUE
-//Path to an executable
-IOCxxTests_CMAKE_PATH:INTERNAL=CDAT_PREFIX/VTK/IO/Testing/Cxx
-//Path to an executable
-ImagingCxxTests_CMAKE_PATH:INTERNAL=CDAT_PREFIX/VTK/Imaging/Testing/Cxx
-//Advanced flag for variable: JAVACOMMAND
-JAVACOMMAND-ADVANCED:INTERNAL=1
-//Result of TRY_COMPILE
-KWSYS_CXX_HAS_ARGUMENT_DEPENDENT_LOOKUP_COMPILED:INTERNAL=TRUE
-//Result of TRY_COMPILE
-KWSYS_CXX_HAS_CSTDDEF_COMPILED:INTERNAL=TRUE
-//Result of TRY_COMPILE
-KWSYS_CXX_HAS_FULL_SPECIALIZATION_COMPILED:INTERNAL=TRUE
-//Result of TRY_COMPILE
-KWSYS_CXX_HAS_MEMBER_TEMPLATES_COMPILED:INTERNAL=TRUE
-//Result of TRY_COMPILE
-KWSYS_CXX_HAS_NULL_TEMPLATE_ARGS_COMPILED:INTERNAL=FALSE
-//Result of TRY_COMPILE
-KWSYS_IOS_HAVE_STD_COMPILED:INTERNAL=TRUE
-//Result of TRY_COMPILE
-KWSYS_IOS_USE_ANSI_COMPILED:INTERNAL=TRUE
-//Result of TRY_COMPILE
-KWSYS_IOS_USE_SSTREAM_COMPILED:INTERNAL=TRUE
-//Result of TRY_COMPILE
-KWSYS_STAT_HAS_ST_MTIM_COMPILED:INTERNAL=TRUE
-//Result of TRY_COMPILE
-KWSYS_STL_HAS_ALLOCATOR_MAX_SIZE_ARGUMENT_COMPILED:INTERNAL=FALSE
-//Result of TRY_COMPILE
-KWSYS_STL_HAS_ALLOCATOR_OBJECTS_COMPILED:INTERNAL=TRUE
-//Result of TRY_COMPILE
-KWSYS_STL_HAS_ALLOCATOR_REBIND_COMPILED:INTERNAL=TRUE
-//Result of TRY_COMPILE
-KWSYS_STL_HAS_ALLOCATOR_TEMPLATE_COMPILED:INTERNAL=TRUE
-//Result of TRY_COMPILE
-KWSYS_STL_HAS_ITERATOR_TRAITS_COMPILED:INTERNAL=TRUE
-//Result of TRY_COMPILE
-KWSYS_STL_HAVE_STD_COMPILED:INTERNAL=TRUE
-//Result of TRY_COMPILE
-KWSYS_STL_STRING_HAVE_NEQ_CHAR_COMPILED:INTERNAL=TRUE
-//Single output directory for building all libraries.
-LIBRARY_OUTPUT_PATH:INTERNAL=CDAT_PREFIX/VTK/bin
-//Advanced flag for variable: MAKECOMMAND
-MAKECOMMAND-ADVANCED:INTERNAL=1
-//Advanced flag for variable: MEMORYCHECK_COMMAND
-MEMORYCHECK_COMMAND-ADVANCED:INTERNAL=1
-//Advanced flag for variable: MEMORYCHECK_SUPPRESSIONS_FILE
-MEMORYCHECK_SUPPRESSIONS_FILE-ADVANCED:INTERNAL=1
-//Advanced flag for variable: OPENGL_INCLUDE_DIR
-OPENGL_INCLUDE_DIR-ADVANCED:INTERNAL=1
-//Advanced flag for variable: OPENGL_gl_LIBRARY
-OPENGL_gl_LIBRARY-ADVANCED:INTERNAL=1
-//Advanced flag for variable: OPENGL_glu_LIBRARY
-OPENGL_glu_LIBRARY-ADVANCED:INTERNAL=1
-//Advanced flag for variable: OPENGL_xmesa_INCLUDE_DIR
-OPENGL_xmesa_INCLUDE_DIR-ADVANCED:INTERNAL=1
-//Advanced flag for variable: PYTHON_EXECUTABLE
-PYTHON_EXECUTABLE-ADVANCED:INTERNAL=1
-//Advanced flag for variable: PYTHON_UTIL_LIBRARY
-PYTHON_UTIL_LIBRARY-ADVANCED:INTERNAL=1
-//Path to an executable
-RenderingCxxTests_CMAKE_PATH:INTERNAL=CDAT_PREFIX/VTK/Rendering/Testing/Cxx
-//Advanced flag for variable: SCPCOMMAND
-SCPCOMMAND-ADVANCED:INTERNAL=1
-//Advanced flag for variable: SITE
-SITE-ADVANCED:INTERNAL=1
-//Result of TRY_RUN
-SIZEOF_DOUBLE:INTERNAL=8
-//Result of TRY_RUN
-SIZEOF_FLOAT:INTERNAL=4
-//Result of TRY_RUN
-SIZEOF_INT:INTERNAL=4
-//Result of TRY_RUN
-SIZEOF_LONG:INTERNAL=4
-//Result of TRY_RUN
-SIZEOF_OFF_T:INTERNAL=4
-//Result of TRY_RUN
-SIZEOF_PTRDIFF_T:INTERNAL=4
-//Result of TRY_RUN
-SIZEOF_SHORT:INTERNAL=2
-//Result of TRY_RUN
-SIZEOF_SIZE_T:INTERNAL=4
-//Result of TRY_RUN
-SIZEOF_SSIZE_T:INTERNAL=4
-//Result of TRY_RUN
-SIZEOF_UNSIGNED_CHAR:INTERNAL=1
-//Have include STDC_HEADERS
-STDC_HEADERS:INTERNAL=1
-//This value is not used by VTK.
-TCL_LIBRARY_DEBUG:INTERNAL=TCL_LIBRARY_DEBUG-NOTFOUND
-//Advanced flag for variable: TCL_STUB_LIBRARY
-TCL_STUB_LIBRARY-ADVANCED:INTERNAL=1
-//This value is not used by VTK.
-TCL_STUB_LIBRARY:INTERNAL=CDAT_PREFIX/lib/libtclstubTCLTK_VERSION.a
-//Advanced flag for variable: TCL_STUB_LIBRARY_DEBUG
-TCL_STUB_LIBRARY_DEBUG-ADVANCED:INTERNAL=1
-//This value is not used by VTK.
-TCL_STUB_LIBRARY_DEBUG:INTERNAL=TCL_STUB_LIBRARY_DEBUG-NOTFOUND
-//Advanced flag for variable: TCL_TCLSH
-TCL_TCLSH-ADVANCED:INTERNAL=1
-//This value is not used by VTK.
-TK_LIBRARY_DEBUG:INTERNAL=TK_LIBRARY_DEBUG-NOTFOUND
-//Advanced flag for variable: TK_STUB_LIBRARY
-TK_STUB_LIBRARY-ADVANCED:INTERNAL=1
-//This value is not used by VTK.
-TK_STUB_LIBRARY:INTERNAL=CDAT_PREFIX/lib/libtkstubTCLTK_VERSION.a
-//Advanced flag for variable: TK_STUB_LIBRARY_DEBUG
-TK_STUB_LIBRARY_DEBUG-ADVANCED:INTERNAL=1
-//This value is not used by VTK.
-TK_STUB_LIBRARY_DEBUG:INTERNAL=TK_STUB_LIBRARY_DEBUG-NOTFOUND
-//This value is not used by VTK.
-TK_WISH:INTERNAL=/usr/bin/wish
-//Path to an executable
-TestCxxFeatures_CMAKE_PATH:INTERNAL=CDAT_PREFIX/VTK/Common/Testing/Cxx
-//Path to an executable
-TestInstantiator_CMAKE_PATH:INTERNAL=CDAT_PREFIX/VTK/Common/Testing/Cxx
-//Path to an executable
-VTKBenchMark_CMAKE_PATH:INTERNAL=CDAT_PREFIX/VTK/Rendering/Testing/Cxx
-//Result of TRY_COMPILE
-VTK_ANSI_STREAM_EOF_COMPILED:INTERNAL=TRUE
-//Result of TRY_RUN
-VTK_ANSI_STREAM_EOF_RESULT:INTERNAL=0
-//Result of TRY_COMPILE
-VTK_CMAKE_EXTENSIONS_COMPILED:INTERNAL=TRUE
-//Support for C++ type bool
-VTK_COMPILER_HAS_BOOL:INTERNAL=1
-//Support for full template specialization syntax
-VTK_COMPILER_HAS_FULL_SPECIALIZATION:INTERNAL=1
-//Advanced flag for variable: VTK_DEBUG_LEAKS
-VTK_DEBUG_LEAKS-ADVANCED:INTERNAL=1
-//Disables the automatic initialization of Tk widgets when loading
-// the rendering library.
-VTK_DISABLE_TK_INIT:INTERNAL=OFF
-//Support for C++ explicit templates
-VTK_EXPLICIT_TEMPLATES:INTERNAL=1
-//Advanced flag for variable: VTK_GLEXT_FILE
-VTK_GLEXT_FILE-ADVANCED:INTERNAL=1
-//Advanced flag for variable: VTK_GLXEXT_FILE
-VTK_GLXEXT_FILE-ADVANCED:INTERNAL=1
-//Result of TRY_COMPILE
-VTK_GLX_GET_PROC_ADDRESS_ARB_PROTOTYPE_EXISTS:INTERNAL=FALSE
-//Already set VTK_GLX_GET_PROC_ADDRESS_ARB_PROTOTYPE_EXISTS
-VTK_GLX_GET_PROC_ADDRESS_ARB_PROTOTYPE_EXISTS_TESTED:INTERNAL=1
-//Have include iosfwd
-VTK_HAVE_ANSI_STREAMS:INTERNAL=1
-//Have include iostream.h
-VTK_HAVE_OLD_STREAMS:INTERNAL=1
-//Have include strstream.h
-VTK_HAVE_OLD_STRSTREAM_H:INTERNAL=1
-//Have include strstrea.h
-VTK_HAVE_OLD_STRSTREA_H:INTERNAL=
-//Whether istream supports long long
-VTK_ISTREAM_SUPPORTS_LONG_LONG:INTERNAL=1
-//Advanced flag for variable: VTK_LEGACY_REMOVE
-VTK_LEGACY_REMOVE-ADVANCED:INTERNAL=1
-//Advanced flag for variable: VTK_LEGACY_SILENT
-VTK_LEGACY_SILENT-ADVANCED:INTERNAL=1
-//Advanced flag for variable: VTK_OPENGL_HAS_OSMESA
-VTK_OPENGL_HAS_OSMESA-ADVANCED:INTERNAL=1
-//Whether ostream supports long long
-VTK_OSTREAM_SUPPORTS_LONG_LONG:INTERNAL=1
-//OpenGL extensions parser.
-VTK_PARSEOGLEXT_EXE:INTERNAL=CDAT_PREFIX/VTK/bin/vtkParseOGLExt
-//Result of TRY_RUN
-VTK_SIZEOF_LONG_LONG:INTERNAL=8
-//Path to the Tcl support library files.
-VTK_TCL_SUPPORT_LIBRARY_PATH:INTERNAL=CDAT_PREFIX/include/../lib/tclTCLTK_VERSION
-//Very few users should worry about this option. If VTK is built
-// against a static Tcl/Tk lib (see VTK_TCL_TK_STATIC) or a shared
-// Tcl/Tk bundled inside a project with no library support files
-// (ex: ParaViewComplete), this variable should be set to ON and
-// both VTK_TCL_SUPPORT_LIBRARY_PATH and VTK_TK_SUPPORT_LIBRARY_PATH
-// should point to the directories that hold those files (typically,
-// lib/tcl8.4 and lib/tk8.4 for a typical Tcl/Tk installation,
-// or tcl8.4.5/library and tk8.4.5/library for a Tcl/Tk source
-// repository). Once this variable is set to ON, support files
-// will automatically be copied to the build directory and the
-// executables will try to use that location to initialize Tcl/Tk.
-//
-VTK_TCL_TK_COPY_SUPPORT_LIBRARY:INTERNAL=ON
-//Advanced flag for variable: VTK_TCL_TK_STATIC
-VTK_TCL_TK_STATIC-ADVANCED:INTERNAL=1
-//Path to the Tk support library files.
-VTK_TK_SUPPORT_LIBRARY_PATH:INTERNAL=CDAT_PREFIX/include/../lib/tkTCLTK_VERSION
-//Whether char is signed.
-VTK_TYPE_CHAR_IS_SIGNED:INTERNAL=1
-//Result of TRY_COMPILE
-VTK_TYPE_CHAR_IS_SIGNED_COMPILED:INTERNAL=TRUE
-//Advanced flag for variable: VTK_USE_64BIT_IDS
-VTK_USE_64BIT_IDS-ADVANCED:INTERNAL=1
-//Advanced flag for variable: VTK_USE_ANSI_STDLIB
-VTK_USE_ANSI_STDLIB-ADVANCED:INTERNAL=1
-//Advanced flag for variable: VTK_USE_DISPLAY
-VTK_USE_DISPLAY-ADVANCED:INTERNAL=1
-//Advanced flag for variable: VTK_USE_GL2PS
-VTK_USE_GL2PS-ADVANCED:INTERNAL=1
-//Have function glXGetProcAddressARB
-VTK_USE_GLX_GET_PROC_ADDRESS_ARB:INTERNAL=1
-//Advanced flag for variable: VTK_USE_GUISUPPORT
-VTK_USE_GUISUPPORT-ADVANCED:INTERNAL=1
-//Advanced flag for variable: VTK_USE_MANGLED_MESA
-VTK_USE_MANGLED_MESA-ADVANCED:INTERNAL=1
-//Advanced flag for variable: VTK_USE_SYSTEM_EXPAT
-VTK_USE_SYSTEM_EXPAT-ADVANCED:INTERNAL=1
-//Advanced flag for variable: VTK_USE_SYSTEM_FREETYPE
-VTK_USE_SYSTEM_FREETYPE-ADVANCED:INTERNAL=1
-//Advanced flag for variable: VTK_USE_SYSTEM_JPEG
-VTK_USE_SYSTEM_JPEG-ADVANCED:INTERNAL=1
-//Advanced flag for variable: VTK_USE_SYSTEM_PNG
-VTK_USE_SYSTEM_PNG-ADVANCED:INTERNAL=1
-//Advanced flag for variable: VTK_USE_SYSTEM_TIFF
-VTK_USE_SYSTEM_TIFF-ADVANCED:INTERNAL=1
-//Advanced flag for variable: VTK_USE_SYSTEM_ZLIB
-VTK_USE_SYSTEM_ZLIB-ADVANCED:INTERNAL=1
-//Advanced flag for variable: VTK_WGLEXT_FILE
-VTK_WGLEXT_FILE-ADVANCED:INTERNAL=1
-//Advanced flag for variable: VTK_WRAP_HINTS
-VTK_WRAP_HINTS-ADVANCED:INTERNAL=1
-//Advanced flag for variable: VTK_WRAP_PYTHON_EXE
-VTK_WRAP_PYTHON_EXE-ADVANCED:INTERNAL=1
-//Advanced flag for variable: VTK_WRAP_PYTHON_INIT_EXE
-VTK_WRAP_PYTHON_INIT_EXE-ADVANCED:INTERNAL=1
-//Advanced flag for variable: VTK_WRAP_TCL_EXE
-VTK_WRAP_TCL_EXE-ADVANCED:INTERNAL=1
-//Advanced flag for variable: VTK_WRAP_TCL_INIT_EXE
-VTK_WRAP_TCL_INIT_EXE-ADVANCED:INTERNAL=1
-//Path to an executable
-VolumeRenderingCxxTests_CMAKE_PATH:INTERNAL=CDAT_PREFIX/VTK/VolumeRendering/Testing/Cxx
-//Result of TRY_RUN
-WORDS_BIGENDIAN:INTERNAL=0
-//Path to an executable
-WidgetsCxxTests_CMAKE_PATH:INTERNAL=CDAT_PREFIX/VTK/Widgets/Testing/Cxx
-//Have library /usr/X11R6/lib/libX11.so;/usr/X11R6/lib/libXext.so
-//
-X11_LIB_X11_SOLO:INTERNAL=1
-//Advanced flag for variable: X11_X11_INCLUDE_PATH
-X11_X11_INCLUDE_PATH-ADVANCED:INTERNAL=1
-//Advanced flag for variable: X11_X11_LIB
-X11_X11_LIB-ADVANCED:INTERNAL=1
-//Advanced flag for variable: X11_Xext_LIB
-X11_Xext_LIB-ADVANCED:INTERNAL=1
-//Advanced flag for variable: X11_Xlib_INCLUDE_PATH
-X11_Xlib_INCLUDE_PATH-ADVANCED:INTERNAL=1
-//Advanced flag for variable: X11_Xutil_INCLUDE_PATH
-X11_Xutil_INCLUDE_PATH-ADVANCED:INTERNAL=1
-//Path to an executable
-mkg3states_CMAKE_PATH:INTERNAL=CDAT_PREFIX/VTK/Utilities/vtktiff
-//Path to a library
-vtkCommonPython_CMAKE_PATH:INTERNAL=CDAT_PREFIX/VTK/Common
-//Whether a library is static, shared or module.
-vtkCommonPython_LIBRARY_TYPE:INTERNAL=MODULE
-//Path to a library
-vtkCommonTCL_CMAKE_PATH:INTERNAL=CDAT_PREFIX/VTK/Common
-//Whether a library is static, shared or module.
-vtkCommonTCL_LIBRARY_TYPE:INTERNAL=SHARED
-//Path to a library
-vtkCommon_CMAKE_PATH:INTERNAL=CDAT_PREFIX/VTK/Common
-//Whether a library is static, shared or module.
-vtkCommon_LIBRARY_TYPE:INTERNAL=SHARED
-//Path to a library
-vtkDICOMParser_CMAKE_PATH:INTERNAL=CDAT_PREFIX/VTK/Utilities/DICOMParser
-//Whether a library is static, shared or module.
-vtkDICOMParser_LIBRARY_TYPE:INTERNAL=SHARED
-//Path to a library
-vtkFilteringPython_CMAKE_PATH:INTERNAL=CDAT_PREFIX/VTK/Filtering
-//Whether a library is static, shared or module.
-vtkFilteringPython_LIBRARY_TYPE:INTERNAL=MODULE
-//Path to a library
-vtkFilteringTCL_CMAKE_PATH:INTERNAL=CDAT_PREFIX/VTK/Filtering
-//Whether a library is static, shared or module.
-vtkFilteringTCL_LIBRARY_TYPE:INTERNAL=SHARED
-//Path to a library
-vtkFiltering_CMAKE_PATH:INTERNAL=CDAT_PREFIX/VTK/Filtering
-//Whether a library is static, shared or module.
-vtkFiltering_LIBRARY_TYPE:INTERNAL=SHARED
-//Path to a library
-vtkGenericFilteringPython_CMAKE_PATH:INTERNAL=CDAT_PREFIX/VTK/GenericFiltering
-//Whether a library is static, shared or module.
-vtkGenericFilteringPython_LIBRARY_TYPE:INTERNAL=MODULE
-//Path to a library
-vtkGenericFilteringTCL_CMAKE_PATH:INTERNAL=CDAT_PREFIX/VTK/GenericFiltering
-//Whether a library is static, shared or module.
-vtkGenericFilteringTCL_LIBRARY_TYPE:INTERNAL=SHARED
-//Path to a library
-vtkGenericFiltering_CMAKE_PATH:INTERNAL=CDAT_PREFIX/VTK/GenericFiltering
-//Whether a library is static, shared or module.
-vtkGenericFiltering_LIBRARY_TYPE:INTERNAL=SHARED
-//Path to a library
-vtkGraphicsPython_CMAKE_PATH:INTERNAL=CDAT_PREFIX/VTK/Graphics
-//Whether a library is static, shared or module.
-vtkGraphicsPython_LIBRARY_TYPE:INTERNAL=MODULE
-//Path to a library
-vtkGraphicsTCL_CMAKE_PATH:INTERNAL=CDAT_PREFIX/VTK/Graphics
-//Whether a library is static, shared or module.
-vtkGraphicsTCL_LIBRARY_TYPE:INTERNAL=SHARED
-//Path to a library
-vtkGraphics_CMAKE_PATH:INTERNAL=CDAT_PREFIX/VTK/Graphics
-//Whether a library is static, shared or module.
-vtkGraphics_LIBRARY_TYPE:INTERNAL=SHARED
-//Path to a library
-vtkHybridPython_CMAKE_PATH:INTERNAL=CDAT_PREFIX/VTK/Hybrid
-//Whether a library is static, shared or module.
-vtkHybridPython_LIBRARY_TYPE:INTERNAL=MODULE
-//Path to a library
-vtkHybridTCL_CMAKE_PATH:INTERNAL=CDAT_PREFIX/VTK/Hybrid
-//Whether a library is static, shared or module.
-vtkHybridTCL_LIBRARY_TYPE:INTERNAL=SHARED
-//Path to a library
-vtkHybrid_CMAKE_PATH:INTERNAL=CDAT_PREFIX/VTK/Hybrid
-//Whether a library is static, shared or module.
-vtkHybrid_LIBRARY_TYPE:INTERNAL=SHARED
-//Path to a library
-vtkIOPython_CMAKE_PATH:INTERNAL=CDAT_PREFIX/VTK/IO
-//Whether a library is static, shared or module.
-vtkIOPython_LIBRARY_TYPE:INTERNAL=MODULE
-//Path to a library
-vtkIOTCL_CMAKE_PATH:INTERNAL=CDAT_PREFIX/VTK/IO
-//Whether a library is static, shared or module.
-vtkIOTCL_LIBRARY_TYPE:INTERNAL=SHARED
-//Path to a library
-vtkIO_CMAKE_PATH:INTERNAL=CDAT_PREFIX/VTK/IO
-//Whether a library is static, shared or module.
-vtkIO_LIBRARY_TYPE:INTERNAL=SHARED
-//Path to a library
-vtkImagingPython_CMAKE_PATH:INTERNAL=CDAT_PREFIX/VTK/Imaging
-//Whether a library is static, shared or module.
-vtkImagingPython_LIBRARY_TYPE:INTERNAL=MODULE
-//Path to a library
-vtkImagingTCL_CMAKE_PATH:INTERNAL=CDAT_PREFIX/VTK/Imaging
-//Whether a library is static, shared or module.
-vtkImagingTCL_LIBRARY_TYPE:INTERNAL=SHARED
-//Path to a library
-vtkImaging_CMAKE_PATH:INTERNAL=CDAT_PREFIX/VTK/Imaging
-//Whether a library is static, shared or module.
-vtkImaging_LIBRARY_TYPE:INTERNAL=SHARED
-//Path to a library
-vtkNetCDF_CMAKE_PATH:INTERNAL=CDAT_PREFIX/VTK/Utilities/vtknetcdf
-//Whether a library is static, shared or module.
-vtkNetCDF_LIBRARY_TYPE:INTERNAL=SHARED
-//Path to an executable
-vtkParseOGLExt_CMAKE_PATH:INTERNAL=CDAT_PREFIX/VTK/Utilities/ParseOGLExt
-//Path to a library
-vtkRenderingPythonTkWidgets_CMAKE_PATH:INTERNAL=CDAT_PREFIX/VTK/Rendering
-//Whether a library is static, shared or module.
-vtkRenderingPythonTkWidgets_LIBRARY_TYPE:INTERNAL=SHARED
-//Path to a library
-vtkRenderingPython_CMAKE_PATH:INTERNAL=CDAT_PREFIX/VTK/Rendering
-//Whether a library is static, shared or module.
-vtkRenderingPython_LIBRARY_TYPE:INTERNAL=MODULE
-//Path to a library
-vtkRenderingTCL_CMAKE_PATH:INTERNAL=CDAT_PREFIX/VTK/Rendering
-//Whether a library is static, shared or module.
-vtkRenderingTCL_LIBRARY_TYPE:INTERNAL=SHARED
-//Path to a library
-vtkRendering_CMAKE_PATH:INTERNAL=CDAT_PREFIX/VTK/Rendering
-//Whether a library is static, shared or module.
-vtkRendering_LIBRARY_TYPE:INTERNAL=SHARED
-//Path to a library
-vtkVolumeRenderingPython_CMAKE_PATH:INTERNAL=CDAT_PREFIX/VTK/VolumeRendering
-//Whether a library is static, shared or module.
-vtkVolumeRenderingPython_LIBRARY_TYPE:INTERNAL=MODULE
-//Path to a library
-vtkVolumeRenderingTCL_CMAKE_PATH:INTERNAL=CDAT_PREFIX/VTK/VolumeRendering
-//Whether a library is static, shared or module.
-vtkVolumeRenderingTCL_LIBRARY_TYPE:INTERNAL=SHARED
-//Path to a library
-vtkVolumeRendering_CMAKE_PATH:INTERNAL=CDAT_PREFIX/VTK/VolumeRendering
-//Whether a library is static, shared or module.
-vtkVolumeRendering_LIBRARY_TYPE:INTERNAL=SHARED
-//Path to a library
-vtkWidgetsPython_CMAKE_PATH:INTERNAL=CDAT_PREFIX/VTK/Widgets
-//Whether a library is static, shared or module.
-vtkWidgetsPython_LIBRARY_TYPE:INTERNAL=MODULE
-//Path to a library
-vtkWidgetsTCL_CMAKE_PATH:INTERNAL=CDAT_PREFIX/VTK/Widgets
-//Whether a library is static, shared or module.
-vtkWidgetsTCL_LIBRARY_TYPE:INTERNAL=SHARED
-//Path to a library
-vtkWidgets_CMAKE_PATH:INTERNAL=CDAT_PREFIX/VTK/Widgets
-//Whether a library is static, shared or module.
-vtkWidgets_LIBRARY_TYPE:INTERNAL=SHARED
-//Path to an executable
-vtkWrapPythonInit_CMAKE_PATH:INTERNAL=CDAT_PREFIX/VTK/Wrapping
-//Path to an executable
-vtkWrapPython_CMAKE_PATH:INTERNAL=CDAT_PREFIX/VTK/Wrapping
-//Path to an executable
-vtkWrapTclInit_CMAKE_PATH:INTERNAL=CDAT_PREFIX/VTK/Wrapping
-//Path to an executable
-vtkWrapTcl_CMAKE_PATH:INTERNAL=CDAT_PREFIX/VTK/Wrapping
-//Path to an executable
-vtk_CMAKE_PATH:INTERNAL=CDAT_PREFIX/VTK/Wrapping/Tcl
-//Path to a library
-vtkexoIIc_CMAKE_PATH:INTERNAL=CDAT_PREFIX/VTK/Utilities/vtkexodus2
-//Whether a library is static, shared or module.
-vtkexoIIc_LIBRARY_TYPE:INTERNAL=SHARED
-//Path to a library
-vtkexpat_CMAKE_PATH:INTERNAL=CDAT_PREFIX/VTK/Utilities/vtkexpat
-//Whether a library is static, shared or module.
-vtkexpat_LIBRARY_TYPE:INTERNAL=SHARED
-//Path to a library
-vtkfreetype_CMAKE_PATH:INTERNAL=CDAT_PREFIX/VTK/Utilities/vtkfreetype
-//Whether a library is static, shared or module.
-vtkfreetype_LIBRARY_TYPE:INTERNAL=SHARED
-//Path to a library
-vtkftgl_CMAKE_PATH:INTERNAL=CDAT_PREFIX/VTK/Utilities/ftgl
-//Whether a library is static, shared or module.
-vtkftgl_LIBRARY_TYPE:INTERNAL=SHARED
-//Path to a library
-vtkjpeg_CMAKE_PATH:INTERNAL=CDAT_PREFIX/VTK/Utilities/vtkjpeg
-//Whether a library is static, shared or module.
-vtkjpeg_LIBRARY_TYPE:INTERNAL=SHARED
-//Path to a library
-vtkpng_CMAKE_PATH:INTERNAL=CDAT_PREFIX/VTK/Utilities/vtkpng
-//Whether a library is static, shared or module.
-vtkpng_LIBRARY_TYPE:INTERNAL=SHARED
-//Path to an executable
-vtkpython_CMAKE_PATH:INTERNAL=CDAT_PREFIX/VTK/Wrapping/Python
-//Path to a library
-vtksys_CMAKE_PATH:INTERNAL=CDAT_PREFIX/VTK/Utilities/kwsys
-//Whether a library is static, shared or module.
-vtksys_LIBRARY_TYPE:INTERNAL=SHARED
-//Path to a library
-vtktiff_CMAKE_PATH:INTERNAL=CDAT_PREFIX/VTK/Utilities/vtktiff
-//Whether a library is static, shared or module.
-vtktiff_LIBRARY_TYPE:INTERNAL=SHARED
-//Path to a library
-vtkzlib_CMAKE_PATH:INTERNAL=CDAT_PREFIX/VTK/Utilities/vtkzlib
-//Whether a library is static, shared or module.
-vtkzlib_LIBRARY_TYPE:INTERNAL=SHARED
-
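The cache template deleted above follows CMake's KEY:TYPE=VALUE layout: each entry carries its documentation in the // comment lines immediately before it, and placeholders such as CDAT_PREFIX, CDAT_BUILD_DIR, PY_VERSION, and TCLTK_VERSION are substituted when the file is instantiated. A minimal Python sketch of reading entries in that layout (the function name and file argument are illustrative, not part of the build):

    # Parse CMakeCache-style "KEY:TYPE=VALUE" entries, keeping the "//"
    # comment lines that precede each entry as its documentation.
    def parse_cache(path):
        entries, doc = {}, []
        with open(path) as f:
            for raw in f:
                line = raw.rstrip("\n")
                if line.startswith("//"):
                    doc.append(line[2:].strip())
                elif "=" in line and ":" in line and not line.startswith("#"):
                    key_type, _, value = line.partition("=")
                    key, _, vtype = key_type.partition(":")
                    entries[key] = (vtype, value, " ".join(doc))
                    doc = []
                else:
                    doc = []
        return entries

    # e.g. parse_cache("CMakeCache.txt")["BUILD_SHARED_LIBS"]
    # -> ('BOOL', 'ON', 'Build VTK with shared libraries.')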
diff --git a/exsrc/blas.sh b/exsrc/blas.sh
deleted file mode 100755
index 921446f3d9b02f34123dc0d96aee9b355c45fff8..0000000000000000000000000000000000000000
--- a/exsrc/blas.sh
+++ /dev/null
@@ -1,25 +0,0 @@
-#!/bin/sh
-PACKAGE="blas"
-BUILD=`pwd`
-export BUILD
-. ./prolog.sh
-
-FC=`${prefix}/${version}/bin/python ${BUILD}/detect_fortran.py`
-export FC
-if ( test "$FC" = "gfortran") then
-    CPPFLAGS="-DpgiFortran"; export CPPFLAGS
-fi
-if (test `uname` = "HP-UX") then
-    CPPFLAGS="+z -D_HPUX_SOURCE"; export CPPFLAGS
-elif (test `uname` = "Darwin") then
-    # set outside a subshell so the exported empty CXX actually takes effect
-    CXX=""; export CXX
-fi
-
-cd blas*
-# Add f77 support
-unset PGI
-echo $FC
-env FORTRAN=${FC} make
-cp libblas.a ${prefix}/Externals/lib
-
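blas.sh hands the detected compiler to the BLAS makefile through the environment (env FORTRAN=${FC} make) rather than as a make variable. A hedged Python rendering of that pattern; build_blas is an illustrative name, and the detected compiler would come from the exsrc/detect_fortran.py helper deleted further down:

    # Run make with the detected Fortran compiler injected into the
    # environment, mirroring blas.sh's `env FORTRAN=${FC} make`.
    import os
    import subprocess

    def build_blas(src_dir, fc):
        env = dict(os.environ, FORTRAN=fc)
        subprocess.run(["make"], cwd=src_dir, env=env, check=True)
        # blas.sh then copies the resulting libblas.a into ${prefix}/Externals/lib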
diff --git a/exsrc/cairo.sh b/exsrc/cairo.sh
deleted file mode 100755
index 79549148306cb14f45ee8c4c1b3d3f1759a7bb86..0000000000000000000000000000000000000000
--- a/exsrc/cairo.sh
+++ /dev/null
@@ -1,14 +0,0 @@
-#!/bin/sh
-PACKAGE="cairo"
-. ./prolog.sh
-CDMSARCH=`uname -m`
-if (test "${CDMSARCH}" = "ia64") then
-  export CC="gcc -fPIC"
-fi
-if (test "${CDMSARCH}" = "x86_64") then
-  export CC="gcc -fPIC"
-fi
-PKG_CONFIG=${prefix}/Externals/bin/pkg-config
-export PKG_CONFIG
-(cd cairo-* ; ./configure --prefix=${prefix}/Externals ; make ; make install )
-
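cairo.sh, like most of the exsrc scripts that follow, wraps the classic ./configure --prefix=... ; make ; make install sequence in a subshell so the cd into the unpacked source tree does not leak into the caller. A rough Python equivalent of that shared idiom (function name and arguments are illustrative):

    # The configure/make/install idiom shared by the exsrc build scripts;
    # running each step with cwd=src_dir plays the role of the subshell cd.
    import subprocess

    def configure_make_install(src_dir, prefix, extra_args=()):
        subprocess.run(["./configure", "--prefix=" + prefix, *extra_args],
                       cwd=src_dir, check=True)
        subprocess.run(["make"], cwd=src_dir, check=True)
        subprocess.run(["make", "install"], cwd=src_dir, check=True)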
diff --git a/exsrc/clean_script b/exsrc/clean_script
deleted file mode 100755
index 185cc2b0e81960ddc82b2f57fc16173a159b106f..0000000000000000000000000000000000000000
--- a/exsrc/clean_script
+++ /dev/null
@@ -1,2 +0,0 @@
-/bin/rm -fr build >/dev/null 2>&1
-find . -name 'config.cache' -print -exec rm {} \; 
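clean_script does two things: removes the build tree and deletes any config.cache files that configure runs leave behind. The same cleanup as a short Python sketch:

    # Equivalent of exsrc/clean_script: drop the build tree, then remove
    # stray config.cache files anywhere under the current directory.
    import pathlib
    import shutil

    shutil.rmtree("build", ignore_errors=True)       # /bin/rm -fr build
    for cache in pathlib.Path(".").rglob("config.cache"):
        print(cache)                                 # find ... -print
        cache.unlink()                               # ... -exec rm {} \;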
diff --git a/exsrc/cmake.sh b/exsrc/cmake.sh
deleted file mode 100755
index 069754011e75a684fe017d024434e3bb1c47e285..0000000000000000000000000000000000000000
--- a/exsrc/cmake.sh
+++ /dev/null
@@ -1,7 +0,0 @@
-#!/bin/sh
-PACKAGE="cmake"
-. ./prolog.sh
-(   cd cmake*; \
-   ./configure --prefix=${prefix}/Externals; \
-   make; make install
-)
diff --git a/exsrc/curl.sh b/exsrc/curl.sh
deleted file mode 100755
index 951fa4c53890d27431c1ad33e950aaf6470f962d..0000000000000000000000000000000000000000
--- a/exsrc/curl.sh
+++ /dev/null
@@ -1,12 +0,0 @@
-#!/bin/sh
-PACKAGE="curl"
-. ./prolog.sh
-CDMSARCH=`uname -m`
-if (test "${CDMSARCH}" = "ia64") then
-  export CC="gcc -fPIC"
-fi
-if (test "${CDMSARCH}" = "x86_64") then
-  export CC="gcc -fPIC"
-fi
-(cd curl* ; ./configure --disable-shared --prefix=${prefix}/Externals/OpenDAP ; make ; make install )
-
diff --git a/exsrc/detect_fortran.py b/exsrc/detect_fortran.py
deleted file mode 100644
index 17c0c5661a71c1f257aa06e1c63a7255d1afa4ae..0000000000000000000000000000000000000000
--- a/exsrc/detect_fortran.py
+++ /dev/null
@@ -1,36 +0,0 @@
-#!/usr/bin/env python
-import os,sys
-
-def detect_fortran_compiler(full_path=True):
-
-
-  fortrans = """
-g77
-gfortran
-f90
-f95
-g95
-xlf90
-fort77
-pgf77
-pgf90
-cf77
-xlf
-ghf77
-"""
-  if os.environ.has_key('FC'):
-    return os.environ['FC']
-
-  for f in fortrans.split():
-    i,o=os.popen4('which '+f)
-    ln=o.readlines()
-    o.close()
-    i.close()
-    if (ln!=[]) and (not 'no' in ln[0].lower().split()) and (not 'not' in ln[0].lower().split()) :
-      if full_path :
-        return ln[0].strip()
-      else:
-        return f
-
-if __name__=="__main__":
-  print detect_fortran_compiler()
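detect_fortran.py is Python 2 throughout (os.environ.has_key, os.popen4, the print statement) and probes for a compiler by shelling out to which and inspecting the output for "no"/"not". A present-day Python 3 sketch of the same probe, using shutil.which instead:

    # Python 3 sketch of detect_fortran.py: honor $FC if set, otherwise
    # return the first Fortran compiler found on PATH.
    import os
    import shutil

    CANDIDATES = ["g77", "gfortran", "f90", "f95", "g95", "xlf90",
                  "fort77", "pgf77", "pgf90", "cf77", "xlf", "ghf77"]

    def detect_fortran_compiler(full_path=True):
        if "FC" in os.environ:
            return os.environ["FC"]
        for name in CANDIDATES:
            path = shutil.which(name)
            if path:
                return path if full_path else name
        return None

    if __name__ == "__main__":
        print(detect_fortran_compiler())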
diff --git a/exsrc/ffmpeg.sh b/exsrc/ffmpeg.sh
deleted file mode 100755
index 50c6b5949887ae5aedb8e94be41282ce2cd271e1..0000000000000000000000000000000000000000
--- a/exsrc/ffmpeg.sh
+++ /dev/null
@@ -1,14 +0,0 @@
-#!/bin/sh
-PACKAGE="ffmpeg"
-. ./prolog.sh
-CDMSARCH=`uname -m`
-if (test "${CDMSARCH}" = "ia64") then
-  export CC="gcc -fPIC"
-fi
-if (test "${CDMSARCH}" = "x86_64") then
-  export CC="gcc -fPIC"
-fi
-PKG_CONFIG=${prefix}/Externals/bin/pkg-config
-export PKG_CONFIG
-(cd ffmpeg ; ./configure --enable-pthreads --enable-gpl --enable-pp --enable-swscaler --enable-x11grab --prefix=${prefix}/Externals ; make ; make install )
-
diff --git a/exsrc/find_site.py b/exsrc/find_site.py
deleted file mode 100644
index 39d76dbff46d90a58702404ef1b81b4fa1830ce0..0000000000000000000000000000000000000000
--- a/exsrc/find_site.py
+++ /dev/null
@@ -1,9 +0,0 @@
-# helper routine for installing Pmw since it has no installer.
-import sys, os
-for x in sys.path:
-    y = os.path.basename(x)
-    if y == 'site-packages':
-        print x
-        break
-else:  # If there is none, such as on older Windows versions
-    print sys.path[-1]
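find_site.py scans sys.path for a directory literally named site-packages and falls back to the last path entry when none exists. Modern interpreters expose the location directly; a hedged equivalent for current CPython:

    # Locate site-packages without scanning sys.path by name.
    import site
    import sysconfig

    print(site.getsitepackages()[0])        # interpreter-wide site-packages
    print(sysconfig.get_path("purelib"))    # scheme-aware equivalent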
diff --git a/exsrc/fontconfig.sh b/exsrc/fontconfig.sh
deleted file mode 100755
index 060f335fb1ec82278e71b00ed9b3fa636992a9c7..0000000000000000000000000000000000000000
--- a/exsrc/fontconfig.sh
+++ /dev/null
@@ -1,15 +0,0 @@
-#!/bin/sh
-PACKAGE="fontconfig"
-. ./prolog.sh
-CDMSARCH=`uname -m`
-if (test "${CDMSARCH}" = "ia64") then
-  export CC="gcc -fPIC"
-fi
-if (test "${CDMSARCH}" = "x86_64") then
-  export CC="gcc -fPIC"
-fi
-PKG_CONFIG=${prefix}/Externals/bin/pkg-config
-export PKG_CONFIG
-
-(cd fontconfig-* ; ./configure --prefix=${prefix}/Externals --enable-libxml2 --with-freetype-config=${prefix}/Externals/bin/freetype-config ; make ; make install )
-
diff --git a/exsrc/freetype.sh b/exsrc/freetype.sh
deleted file mode 100755
index a540ae58f63017a2e39948e2909f99f507f65d11..0000000000000000000000000000000000000000
--- a/exsrc/freetype.sh
+++ /dev/null
@@ -1,12 +0,0 @@
-#!/bin/sh
-PACKAGE="freetype"
-. ./prolog.sh
-CDMSARCH=`uname -m`
-if (test "${CDMSARCH}" = "ia64") then
-  export CC="gcc -fPIC"
-fi
-if (test "${CDMSARCH}" = "x86_64") then
-  export CC="gcc -fPIC"
-fi
-(cd freetype-* ; ./configure --prefix=${prefix}/Externals ; make ; make install ; ln -s ${prefix}/Externals/include/freetype2/freetype ${prefix}/Externals/include/freetype )
-
diff --git a/exsrc/gdal.sh b/exsrc/gdal.sh
deleted file mode 100755
index 714a94bb5b024b4c4f3ca232c6783379eec8da79..0000000000000000000000000000000000000000
--- a/exsrc/gdal.sh
+++ /dev/null
@@ -1,5 +0,0 @@
-#!/bin/sh
-PACKAGE="gdal"
-. ./prolog.sh
-(cd gdal* ; ./configure --with-libtiff=internal --with-gif=internal --without-cfitsio --prefix=${prefix}/Externals ; make ; make install; ${prefix}/${version}/bin/python setup.py install )
-
diff --git a/exsrc/ghostscript.sh b/exsrc/ghostscript.sh
deleted file mode 100755
index 0a100777be24a48923e3ee3b9c21f677c643b4ee..0000000000000000000000000000000000000000
--- a/exsrc/ghostscript.sh
+++ /dev/null
@@ -1,5 +0,0 @@
-#!/bin/sh
-PACKAGE="ghostscript"
-. ./prolog.sh
-(mkdir -p ${prefix}/Externals/share/ghostscript ; cd ghostscript-*; ln -s ../libpng-1.2.8 libpng ; ln -s ../jpeg-6b jpeg ; ./configure --prefix=${prefix}/Externals ; make ; make install ; mv ../fonts ${prefix}/Externals/share/ghostscript )
-
diff --git a/exsrc/gifmerge.sh b/exsrc/gifmerge.sh
deleted file mode 100755
index 85a4ac810f65dc1964d9f036e87d464576288ef9..0000000000000000000000000000000000000000
--- a/exsrc/gifmerge.sh
+++ /dev/null
@@ -1,5 +0,0 @@
-#!/bin/sh
-PACKAGE="gifmerge"
-. ./prolog.sh
-(cd gifmerge* ; make ; mv gifmerge ${prefix}/Externals/bin )
-
diff --git a/exsrc/gifsicle.sh b/exsrc/gifsicle.sh
deleted file mode 100755
index 6ebe09f5fb56d698bd16f13455fe2117f0464859..0000000000000000000000000000000000000000
--- a/exsrc/gifsicle.sh
+++ /dev/null
@@ -1,5 +0,0 @@
-#!/bin/sh
-PACKAGE="gifsicle"
-. ./prolog.sh
-(cd gifsicle*; ./configure --prefix=${prefix}/Externals ; make install )
-
diff --git a/exsrc/gplot.sh b/exsrc/gplot.sh
deleted file mode 100755
index 2b588cd1f09734a4a04deb1b64f8a39221fd5d41..0000000000000000000000000000000000000000
--- a/exsrc/gplot.sh
+++ /dev/null
@@ -1,6 +0,0 @@
-#!/bin/sh
-PACKAGE=gplot
-. ./prolog.sh
-d=`uname`
-(cd gplot; make -f Makefile.${d} ; mv gplot ${prefix}/Externals/bin )
-
diff --git a/exsrc/hdf.sh b/exsrc/hdf.sh
deleted file mode 100755
index f4a8cbf5391e13a582c46b2220f737b2105fe375..0000000000000000000000000000000000000000
--- a/exsrc/hdf.sh
+++ /dev/null
@@ -1,12 +0,0 @@
-#!/bin/sh
-PACKAGE="HDF"
-. ./prolog.sh
-CDMSARCH=`uname -m`
-if (test "${CDMSARCH}" = "ia64") then
-  export CC="gcc -fPIC"
-fi
-if (test "${CDMSARCH}" = "x86_64") then
-  export CC="gcc -fPIC"
-fi
-(cd HDF* ; env CFLAGS=-DHAVE_NETCDF CXXFLAGS=-DHAVE_NETCDF ./configure --enable-fortran=no --disable-shared --with-jpeg=${prefix}/Externals/HDF --prefix=${prefix}/Externals/HDF ; make ; make install ; cp -pf ${prefix}/Externals/HDF/bin/* ${prefix}/Externals/bin )
-
diff --git a/exsrc/install_script.obsolete b/exsrc/install_script.obsolete
deleted file mode 100755
index 10979760948a14ebe01e56bf86ddeaf0e935ae99..0000000000000000000000000000000000000000
--- a/exsrc/install_script.obsolete
+++ /dev/null
@@ -1,1154 +0,0 @@
-#!/bin/sh
-
-if [ -n "$PYTHONPATH" ]; then
-    echo "PYTHONPATH environment variable should not be set!"
-    exit 1
-fi
-
-if [ -n "$PYTHONHOME" ]; then
-   echo "PYTHONHOME environment variable should not be set!"
-   exit 1
-fi
-
-cdmsonly=no
-
-OS=`uname`
-NetCDF=yes
-dap=no
-hdf=no 
-freetype=yes
-numpy=yes
-scipy=yes
-ipython=yes
-cairo=yes
-ffmpeg=yes
-blas=yes
-lapack=yes
-lapack95=yes
-
-echo $OS
-if [ "$OS" = "Linux" ]; then
-    pbmplus=no
-    netpbm=no
-elif [ "$OS" = "Darwin" ]; then
-    pbmplus=no
-    netpbm=yes  
-elif [ "$OS" = "CYGWIN_NT-5.1" ]; then
-    pbmplus=no
-    netpbm=yes  
-elif [ "$OS" = "CYGWIN_NT-6.0" ]; then
-    pbmplus=no
-    netpbm=yes  
-else
-    netpbm=no
-    pbmplus=yes
-fi
-
-s=$1; shift;
-Pyfort=yes
-Numeric=no
-XGKS=yes
-Pmw=yes
-gplot=no
-gifsicle=yes
-R=no
-VTK=no
-ghostscript=no
-ioapi=no
-ncfortran=no
-
-while [ "$#" -ne 0 ]
-do
-  # Translate $1 to lowercase
-  MYOPT=`echo $1 | tr 'A-Z' 'a-z'`
-    if [ "$MYOPT" = "--help" ]; then
-        echo "  Builds external software required by CDAT."
-        echo "  Packages builds are:"
-	echo "          numpy 1.3.0.1 (on)"
-	echo "          scipy 0.5.2.1 (on)"
-	echo "          ipython 0.8 (off) (includes ipython1 and Twisted 2.5.0)"
-        echo "          freetype 2.3.4 (on)"
-        echo "          cairo 1.4.12 (on)"
-        echo "          ffmpeg (11/4/2007) (on)"
-        echo "          Pyfort 8.5.5 (on)"
-	echo "          jpeg 6b (on)"
-	echo "          libpng 1.2.8 (on)"
-	echo "          Ghostscript 8.50 with jpeg 6b and libpng 1.2.8 (on)"
-        echo "          NetCDF 3.6.1"
-        echo "          NetCDF-Fortran 3.6.1 (off) to build NetCDF with Fortran"
-        echo "          XGKS (on) with plug to freetype fonts"
-        echo "          Numeric 23.1 (on)"
-        echo "          Pmw 1.3 (on)"
-        echo "          gplot (off)"
-        echo "          gifsicle 1.35 (on)"
-        echo "          netpbm 10.27 (on Linux/Mac, off otherwise)"
-        echo "          pbmplus (off Linux/Mac, on otherwise)"
-        echo "          gifmerge (on)"
-        echo "          opendap 3.5: libdap 3.5.3 libnc-dap 3.5.2"
-        echo "          HDF 4.2.r1 (off)"
-        echo "          R 2.5.0 (off)"
-        echo "          ioapi 3.0 (off) will turn off opendap and on NetCDF-Fortran"
-        echo "		gdal 1.4.3 (off) turned on by ioapi"
-	echo "		proj 4.4.9 (off) turned on by ioapi"
-        echo "  Packages can be turned on/off using --enable-PACKAGE --disable-PACKAGE"
-        echo "  You can build a single Package by passing --PACKAGE-only"
-        echo "  If you already built externals before, or do not wish to build them because you think you already have them"
-        echo "  pass: --disable-externals-build"
-        echo "        This will only build python-based externals"
-        echo "  Notes:"
-        echo "          opendap is very unlikely to build on any non standard platform"
-                                                                                                                             
-
-	exit 1
-    fi
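Every option handled below first lowercases $1 with tr and then toggles each per-package flag by hand, which is why the --PACKAGE-only blocks repeat near-identical assignment lists. The same dispatch can be sketched compactly in Python; the package list here is abridged from the help text above:

    # Sketch of the install_script option handling: normalize arguments to
    # lowercase; --PACKAGE-only turns everything off except one package.
    PACKAGES = ["numeric", "netcdf", "opendap", "hdf4", "pyfort",
                "xgks", "pmw", "gplot", "gifsicle", "netpbm"]

    def parse(argv):
        flags = {p: True for p in PACKAGES}
        for arg in argv:
            opt = arg.lower()                       # mirrors: tr 'A-Z' 'a-z'
            if opt.startswith("--") and opt.endswith("-only"):
                wanted = opt[2:-len("-only")]
                flags = {p: p == wanted for p in PACKAGES}
            elif opt.startswith("--disable-") and opt[10:] in flags:
                flags[opt[10:]] = False
            elif opt.startswith("--enable-") and opt[9:] in flags:
                flags[opt[9:]] = True
        return flags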
-    if [ "$MYOPT" = "--cdms-only" ]; then
-	Pyfort=no
-	XGKS=no
-	Pmw=no
-	gplot=no
-	gifsicle=no
-	pbmplus=no
-	netpbm=no
-	gifmerge=no
-	ghostscript=no
-        freetype=no
-	scipy=no
-	ipython=no
-	cairo=no
-	ffmpeg=no
-	blas=no
-	lapack=no
-	lapack95=no
-    fi
-    if [ "$MYOPT" = "--ioapi-only" ]; then
-	Pyfort=no
-	XGKS=no
-	Pmw=no
-	gplot=no
-	gifsicle=no
-	pbmplus=no
-	netpbm=no
-	gifmerge=no
-	ghostscript=no
-	ioapi=yes
-	ncfortran=yes
-	NetCDF=no
-	dap=no
-	Numeric=no
-	hdf=no
-        freetype=no
-	numpy=no
-	scipy=no
-	ipython=no
-	cairo=no
-	ffmpeg=no
-	blas=no
-	lapack=no
-	lapack95=no
-    fi
-    if [ "$MYOPT" = "--numeric-only" ]; then
-	Numeric=yes
-	dap=no
-	NetCDF=no
-	Pyfort=no
-	XGKS=no
-	Pmw=no
-	gplot=no
-	gifsicle=no
-	pbmplus=no
-	netpbm=no
-	gifmerge=no
-        ghostscript=no
-	ioapi=no
-	hdf=no
-        freetype=no
-	numpy=no
-	scipy=no
-	ipython=no
-	cairo=no
-	ffmpeg=no
-	blas=no
-	lapack=no
-	lapack95=no
-    fi
-    if [ "$MYOPT" = "--opendap-only" ]; then
-	Numeric=no
-	dap=yes
-	hdf=no
-	NetCDF=no
-	Pyfort=no
-	XGKS=no
-	Pmw=no
-	gplot=no
-	gifsicle=no
-	pbmplus=no
-        netpbm=no
-	gifmerge=no
-        ghostscript=no
-        freetype=no
-	numpy=no
-	scipy=no
-	ipython=no
-	cairo=no
- 	ffmpeg=no
-	blas=no
-	lapack=no
-	lapack95=no
-   fi
-    if [ "$MYOPT" = "--hdf4-only" ]; then
-	Numeric=no
-	dap=no
-	hdf=yes
-	NetCDF=no
-	Pyfort=no
-	XGKS=no
-	Pmw=no
-	gplot=no
-	gifsicle=no
-	pbmplus=no
-        netpbm=no
-	gifmerge=no
-        ghostscript=no
-        freetype=no
-	numpy=no
-	scipy=no
-	ipython=no
-	cairo=no
- 	ffmpeg=no
-	blas=no
-	lapack=no
-	lapack95=no
-   fi
-    if [ "$MYOPT" = "--netcdf-only" ]; then
-	Numeric=no
-	dap=no
-	hdf=no
-	NetCDF=yes
-	Pyfort=no
-	XGKS=no
-	Pmw=no
-	gplot=no
-	gifsicle=no
-        netpbm=no
-	pbmplus=no
-	gifmerge=no
-        ghostscript=no
-        freetype=no
-	numpy=no
-	scipy=no
-	ipython=no
-	cairo=no
- 	ffmpeg=no
- 	blas=no
-	lapack=no
-	lapack95=no
-  fi
-    if [ "$MYOPT" = "--netcdf-fortran-only" ]; then
-	Numeric=no
-	hdf=no
-	dap=no
-	NetCDF=no
-	ncfortran=yes
-	Pyfort=no
-	XGKS=no
-	Pmw=no
-	gplot=no
-	gifsicle=no
-        netpbm=no
-	pbmplus=no
-	gifmerge=no
-        ghostscript=no
-        freetype=no
-	numpy=no
-	scipy=no
-	ipython=no
-	cairo=no
-	ffmpeg=no
- 	blas=no
-	lapack=no
-	lapack95=no
-   fi
-    if [ "$MYOPT" = "--pyfort-only" ]; then
-	Numeric=no
-	dap=no
-	hdf=no
-	NetCDF=no
-	Pyfort=yes
-	XGKS=no
-	Pmw=no
-	gplot=no
-	gifsicle=no
-	pbmplus=no
-        netpbm=no
-	gifmerge=no
-        ghostscript=no
-        freetype=no
-	numpy=no
-	scipy=no
-	ipython=no
-	cairo=no
-	ffmpeg=no
-	blas=no
-	lapack=no
-	lapack95=no
-    fi
-    if [ "$MYOPT" = "--xgks-only" ]; then
-	Numeric=no
-	dap=no
-	hdf=no
-	NetCDF=no
-	Pyfort=no
-	XGKS=yes
-	Pmw=no
-	gplot=no
-	gifsicle=no
-	pbmplus=no
-        netpbm=no
-	gifmerge=no
-        ghostscript=no
-        freetype=no
-	numpy=no
-	scipy=no
-	ipython=no
-	cairo=no
-	ffmpeg=no
-	blas=no
-	lapack=no
-	lapack95=no
-    fi
-    if [ "$MYOPT" = "--pmw-only" ]; then
-	Numeric=no
-	dap=no
-	NetCDF=no
-	Pyfort=no
-	XGKS=no
-	Pmw=yes
-	gplot=no
-	gifsicle=no
-	pbmplus=no
-        netpbm=no
-	gifmerge=no
-        ghostscript=no
-        freetype=no
-	numpy=no
-	scipy=no
-	ipython=no
-	cairo=no
-	ffmpeg=no
-	blas=no
-	lapack=no
-	lapack95=no
-    fi
-    if [ "$MYOPT" = "--gplot-only" ]; then
-	Numeric=no
-	dap=no
-	hdf=no
-	NetCDF=no
-	Pyfort=no
-	XGKS=no
-	Pmw=no
-	gplot=yes
-	gifsicle=no
-        netpbm=no
-	pbmplus=no
-	gifmerge=no
-        ghostscript=no
-        freetype=no
-	numpy=no
-	scipy=no
-	ipython=no
-	cairo=no
-	ffmpeg=no
-	blas=no
-	lapack=no
-	lapack95=no
-   fi
-    if [ "$MYOPT" = "--gifsicle-only" ]; then
-	Numeric=no
-	dap=no
-	hdf=no
-	NetCDF=no
-	Pyfort=no
-	XGKS=no
-	Pmw=no
-	gplot=no
-	gifsicle=yes
-        netpbm=no
-	pbmplus=no
-	gifmerge=no
-        ghostscript=no
-        freetype=no
-	numpy=no
-	scipy=no
-	ipython=no
-	cairo=no
-	ffmpeg=no
-	blas=no
-	lapack=no
-	lapack95=no
-    fi
-    if [ "$MYOPT" = "--netpbm-only" ]; then
-        Numeric=no
-        dap=no
-	hdf=no
-        NetCDF=no
-        Pyfort=no
-        XGKS=no
-        Pmw=no
-        gplot=no
-        gifsicle=no
-        netpbm=yes
-        pbmplus=no
-        gifmerge=no
-        ghostscript=no
-        freetype=no
-	numpy=no
-	scipy=no
-	ipython=no
-	cairo=no
-	ffmpeg=no
-	blas=no
-	lapack=no
-	lapack95=no
-    fi
-    if [ "$MYOPT" = "--pbmplus-only"  ]; then
-	Numeric=no
-	dap=no
-	hdf=no
-	NetCDF=no
-	Pyfort=no
-	XGKS=no
-	Pmw=no
-	gplot=no
-	gifsicle=no
-        netpbm=no
-	pbmplus=yes
-	gifmerge=no
-        ghostscript=no
-        freetype=no
-	numpy=no
-	scipy=no
-	ipython=no
-	cairo=no
-	ffmpeg=no
- 	blas=no
-	lapack=no
-	lapack95=no
-   fi
-    if [ "$MYOPT" = "--gifmerge-only"  ]; then
-	Numeric=no
-	dap=no
-	hdf=no
-	NetCDF=no
-	Pyfort=no
-	XGKS=no
-	Pmw=no
-	gplot=no
-	gifsicle=no
-        netpbm=no
-	pbmplus=no
-	gifmerge=yes
-        ghostscript=no
-        freetype=no
-	numpy=no
-	scipy=no
-	ipython=no
-	cairo=no
-	ffmpeg=no
- 	blas=no
-	lapack=no
-	lapack95=no
-   fi
-    if [ "$MYOPT" = "--r-only" ]; then
-	Numeric=no
-	dap=no
-	hdf=no
-	NetCDF=no
-	Pyfort=no
-	XGKS=no
-	Pmw=no
-	gplot=no
-	gifsicle=no
-        netpbm=no
-	pbmplus=no
-	gifmerge=no
-	R=yes
-        ghostscript=no
-        freetype=no
-	numpy=no
-	scipy=no
-	ipython=no
-	cairo=no
-	ffmpeg=no
-	blas=no
-	lapack=no
-	lapack95=no
-    fi
-#    if [ "$MYOPT" = "--vtk-only" ]; then
-#	Numeric=no
-#	dap=no
-#	hdf=no
-#	NetCDF=no
-#	Pyfort=no
-#	XGKS=no
-#	Pmw=no
-#	gplot=no
-#	gifsicle=no
-#        netpbm=no
-#	pbmplus=no
-#	gifmerge=no
-#	VTK=yes
-#        ghostscript=no
-#        freetype=no
-#	numpy=no
-#	scipy=no
-#	ipython=no
-#    fi
-    if [ "$MYOPT" = "--ghostscript-only" ]; then
-        Numeric=no
-        dap=no
-	hdf=no
-        NetCDF=no
-        Pyfort=no
-        XGKS=no
-        Pmw=no
-        gplot=no
-        gifsicle=no
-        netpbm=no
-        pbmplus=no
-        gifmerge=no
-        ghostscript=yes
-        freetype=no
-	numpy=no
-	scipy=no
-	ipython=no
-	cairo=no
-	ffmpeg=no
-	blas=no
-	lapack=no
-	lapack95=no
-    fi
-    if [ "$MYOPT" = "--freetype-only" ]; then
-        Numeric=no
-        dap=no
-	hdf=no
-        NetCDF=no
-        Pyfort=no
-        XGKS=no
-        Pmw=no
-        gplot=no
-        gifsicle=no
-        netpbm=no
-        pbmplus=no
-        gifmerge=no
-        ghostscript=no
-        freetype=yes
-	cairo=no
-	ffmpeg=no
-	numpy=no
-	scipy=no
-	ipython=no
-	cairo=no
-	ffmpeg=no
-	blas=no
-	lapack=no
-	lapack95=no
-    fi
-    if [ "$MYOPT" = "--numpy-only" ]; then
-        Numeric=no
-        dap=no
-	hdf=no
-        NetCDF=no
-        Pyfort=no
-        XGKS=no
-        Pmw=no
-        gplot=no
-        gifsicle=no
-        netpbm=no
-        pbmplus=no
-        gifmerge=no
-        ghostscript=no
-        freetype=no
-	numpy=yes
-	scipy=no
-	ipython=no
-	cairo=no
-	ffmpeg=no
-	blas=no
-	lapack=no
-	lapack95=no
-    fi
-    if [ "$MYOPT" = "--scipy-only" ]; then
-        Numeric=no
-        dap=no
-	hdf=no
-        NetCDF=no
-        Pyfort=no
-        XGKS=no
-        Pmw=no
-        gplot=no
-        gifsicle=no
-        netpbm=no
-        pbmplus=no
-        gifmerge=no
-        ghostscript=no
-        freetype=no
-	numpy=no
-	scipy=yes
-	ipython=no
-	cairo=no
-	ffmpeg=no
- 	blas=no
-	lapack=no
-	lapack95=no
-   fi
-    if [ "$MYOPT" = "--ipython-only" ]; then
-        Numeric=no
-        dap=no
-	hdf=no
-        NetCDF=no
-        Pyfort=no
-        XGKS=no
-        Pmw=no
-        gplot=no
-        gifsicle=no
-        netpbm=no
-        pbmplus=no
-        gifmerge=no
-        ghostscript=no
-        freetype=no
-	numpy=no
-	scipy=no
-	ipython=yes
-	cairo=no
-	ffmpeg=no
-	blas=no
-	lapack=no
-	lapack95=no
-    fi
-    if [ "$MYOPT" = "--cairo-only" ]; then
-        Numeric=no
-        dap=no
-	hdf=no
-        NetCDF=no
-        Pyfort=no
-        XGKS=no
-        Pmw=no
-        gplot=no
-        gifsicle=no
-        netpbm=no
-        pbmplus=no
-        gifmerge=no
-        ghostscript=no
-        freetype=no
-        numpy=no
-        scipy=no
-        ipython=no
-	cairo=yes
-	ffmpeg=no
-	blas=no
-	lapack=no
-	lapack95=no
-    fi
-    if [ "$MYOPT" = "--ffmpeg-only" ]; then
-        Numeric=no
-        dap=no
-	hdf=no
-        NetCDF=no
-        Pyfort=no
-        XGKS=no
-        Pmw=no
-        gplot=no
-        gifsicle=no
-        netpbm=no
-        pbmplus=no
-        gifmerge=no
-        ghostscript=no
-        freetype=no
-	cairo=no
-	ffmpeg=yes
-        numpy=no
-        scipy=no
-        ipython=no
-	blas=no
-	lapack=no
-	lapack95=no
-    fi
-    if [ "$MYOPT" = "--blas-only" ]; then
-        Numeric=no
-        dap=no
-	hdf=no
-        NetCDF=no
-        Pyfort=no
-        XGKS=no
-        Pmw=no
-        gplot=no
-        gifsicle=no
-        netpbm=no
-        pbmplus=no
-        gifmerge=no
-        ghostscript=no
-        freetype=no
-	cairo=no
-	ffmpeg=no
-        numpy=no
-        scipy=no
-        ipython=no
-	blas=yes
-	lapack=no
-	lapack95=no
-    fi
-    if [ "$MYOPT" = "--lapack-only" ]; then
-        Numeric=no
-        dap=no
-	hdf=no
-        NetCDF=no
-        Pyfort=no
-        XGKS=no
-        Pmw=no
-        gplot=no
-        gifsicle=no
-        netpbm=no
-        pbmplus=no
-        gifmerge=no
-        ghostscript=no
-        freetype=no
-	cairo=no
-	ffmpeg=no
-        numpy=no
-        scipy=no
-        ipython=no
-	blas=no
-	lapack=yes
-	lapack95=no
-    fi
-    if [ "$MYOPT" = "--lapack95-only" ]; then
-        Numeric=no
-        dap=no
-	hdf=no
-        NetCDF=no
-        Pyfort=no
-        XGKS=no
-        Pmw=no
-        gplot=no
-        gifsicle=no
-        netpbm=no
-        pbmplus=no
-        gifmerge=no
-        ghostscript=no
-        freetype=no
-	cairo=no
-	ffmpeg=no
-        numpy=no
-        scipy=no
-        ipython=no
-	blas=no
-	lapack=no
-	lapack95=yes
-    fi
-# Turn Off Options.....
-    if [ "$MYOPT" = "--disable-opendap" ]; then
-	dap=no
-        if [ $ioapi = no ]; then
-	   NetCDF=yes
-	fi
-	echo "Turning opendap Off"
-    fi
-    if [ "$MYOPT" = "--disable-hdf4" ]; then
-	hdf=no
-	echo "Turning hdf4 Off"
-    fi
-    if [ "$MYOPT" = "--disable-ioapi" ]; then
-	ioapi=no
-	echo "Turning ioapi Off"
-    fi
-    if [ "$MYOPT" = "--disable-ghostscript" ]; then
-	ghostscript=no
-	echo "Turning ghostscript Off"
-    fi
-    if [ "$MYOPT" = "--disable-pyfort" ]; then
-        Pyfort=no
-        echo "Turning Pyfort Off"
-    fi
-    if [ "$MYOPT" = "--disable-numeric" ]; then
-	Numeric=no
-	echo "Turning Numeric Off"
-    fi
-    if [ "$MYOPT" = "--disable-xgks" ]; then
-	XGKS=no
-	echo "Turning XGKS Off"
-    fi
-    if [ "$MYOPT" = "--disable-pmw" ]; then
-	Pmw=no
-	echo "Turning Pmw Off"
-    fi
-    if [ "$MYOPT" = "--disable-gplot" ]; then
-	gplot=no
-	echo "Turning gplot Off"
-    fi
-    if [ "$MYOPT" = "--disable-gifsicle" ]; then
-	gifsicle=no
-	echo "Turning gifsicle Off"
-    fi
-    if [ "$MYOPT" = "--disable-netpbm" ]; then
-	netpbm=no
-	echo "Turning netpbm Off"
-    fi
-    if [ "$MYOPT" = "--disable-pbmplus" ]; then
-	pbmplus=no
-	echo "Turning pbmplus Off"
-    fi
-    if [ "$MYOPT" = "--disable-gifmerge" ]; then
-	gifmerge=no
-	echo "Turning gifmerge Off"
-    fi
-    if [ "$MYOPT" = "--disable-netcdf" ]; then
-	NetCDF=no
-	echo "Turning NetCDF Off"
-    fi
-    if [ "$MYOPT" = "--disable-r"  ]; then
-	R=no
-	echo "Turning R Off"
-    fi
-#    if [ "$MYOPT" = "--disable-vtk"  ]; then
-#	VTK=no
-#	echo "Turning VTK Off"
-#    fi
-    if [ "$MYOPT" = "--disable-freetype" ]; then
-	freetype=no
-	echo "Turning freetype Off"
-    fi
-    if [ "$MYOPT" = "--disable-numpy" ]; then
-	numpy=no
-	echo "Turning numpy Off"
-    fi
-    if [ "$MYOPT" = "--disable-scipy" ]; then
-	scipy=no
-	echo "Turning scipy Off"
-    fi
-    if [ "$MYOPT" = "--disable-ipython" ]; then
-	ipython=no
-	echo "Turning ipython Off"
-    fi
-    if [ "$MYOPT" = "--disable-cairo" ]; then
-	cairo=no
-	echo "Turning cairo Off"
-    fi
-    if [ "$MYOPT" = "--disable-ffmpeg" ]; then
-	ffmpeg=no
-	echo "Turning ffmpeg Off"
-    fi
-    if [ "$MYOPT" = "--disable-blas" ]; then
-	blas=no
-	echo "Turning blas Off"
-    fi
-    if [ "$MYOPT" = "--disable-lapack" ]; then
-	lapack=no
-	lapack95=no
-	echo "Turning lapack and lapack95 Off"
-    fi
-    if [ "$MYOPT" = "--disable-lapack95" ]; then
-	lapack95=no
-	echo "Turning lapack95 Off"
-    fi
-# Turn On Options.....
-    if [ "$MYOPT" = "--enable-ioapi" ]; then
-	ioapi=yes
-	NetCDF=no
-	ncfortran=yes
-	echo "Turning ioapi On"
-    fi
-    if [ "$MYOPT" = "--enable-opendap" ]; then
-	dap=yes
-	echo "Turning opendap On"
-    fi
-    if [ "$MYOPT" = "--enable-pyfort" ]; then
-	Pyfort=yes
-	echo "Turning Pyfort On"
-    fi
-    if [ "$MYOPT" = "--enable-ghostscript" ]; then
-        ghostscript=yes
-        echo "Turning Ghostscript On"
-    fi
-    if [ "$MYOPT" = "--enable-numeric" ]; then
-	Numeric=yes
-	echo "Turning Numeric On"
-    fi
-    if [ "$MYOPT" = "--enable-xgks" ]; then
-	XGKS=yes
-	echo "Turning XGKS On"
-    fi
-    if [ "$MYOPT" = "--enable-pmw" ]; then
-	Pmw=yes
-	echo "Turning Pmw On"
-    fi
-    if [ "$MYOPT" = "--enable-gplot" ]; then
-	gplot=yes
-	echo "Turning gplot On"
-    fi
-    if [ "$MYOPT" = "--enable-gifsicle" ]; then
-	gifsicle=yes
-	echo "Turning gifsicle On"
-    fi
-    if [ "$MYOPT" = "--enable-netpbm" ]; then
-	netpbm=yes
-	echo "Turning netpbm On"
-    fi
-    if [ "$MYOPT" = "--enable-pbmplus" ]; then
-	pbmplus=yes
-	echo "Turning pbmplus On"
-    fi
-    if [ "$MYOPT" = "--enable-gifmerge" ]; then
-	gifmerge=yes
-	echo "Turning gifmerge On"
-    fi
-    if [ "$MYOPT" = "--enable-netcdf" ]; then
-	NetCDF=yes
-	echo "Turning NetCDF On"
-    fi
-    if [ "$MYOPT" = "--enable-r" ]; then
-	R=yes
-	echo "Turning R On"
-    fi
-    if [ "$MYOPT" = "--enable-hdf4" ]; then
-	hdf=yes
-	echo "Turning hdf4 On"
-    fi
-#    if [ "$MYOPT" = "--enable-vtk" ]; then
-#	VTK=yes
-#	echo "Turning VTK On"
-#    fi
-    if [ "$MYOPT" = "--enable-freetype" ]; then
-	freetype=yes
-	echo "Turning freetype On"
-    fi
-    if [ "$MYOPT" = "--enable-numpy" ]; then
-	numpy=yes
-	echo "Turning numpy On"
-    fi
-    if [ "$MYOPT" = "--enable-scipy" ]; then
-	scipy=yes
-	echo "Turning scipy On; do not disable blas and lapack unless they are already installed on your system"
-    fi
-    if [ "$MYOPT" = "--enable-ipython" ]; then
-	ipython=yes
-	echo "Turning ipython On"
-    fi
-    if [ "$MYOPT" = "--enable-cairo" ]; then
-	cairo=yes
-	echo "Turning cairo On"
-    fi
-    if [ "$MYOPT" = "--enable-ffmpeg" ]; then
-	ffmpeg=yes
-	echo "Turning ffmpeg On"
-    fi
-    if [ "$MYOPT" = "--enable-blas" ]; then
-	blas=yes
-	echo "Turning blas On"
-    fi
-    if [ "$MYOPT" = "--enable-lapack" ]; then
-	lapack=yes
-	echo "Turning lapack On"
-    fi
-    if [ "$MYOPT" = "--enable-lapack95" ]; then
-	lapack=yes
-	lapack95=yes
-	echo "Turning lapack and lapack95 On"
-    fi
-    if [ "$MYOPT" = "--disable-externals-build" ]; then
-	gplot=no
-	gifsicle=no
-        netpbm=no
-	pbmplus=no
-	gifmerge=no
-        ghostscript=no
-        freetype=no
-	cairo=no
-	ffmpeg=no
-	XGKS=no
-        dap=no
-	hdf=no
-        NetCDF=no
-	blas=no
-	lapack=no
-	lapack95=no
-    fi
-    shift
-done
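
The twenty-odd "--PACKAGE-only" branches above each reset the same two dozen flags by hand, differing only in which flag stays on. A minimal refactoring sketch in the same shell dialect (the helper name all_off is hypothetical, and irregular cases such as --ioapi-only and --netcdf-fortran-only would still need their extra assignments):

    # Turn every package off; each "--PACKAGE-only" branch then
    # re-enables just the one it is named after.
    all_off () {
        Numeric=no; dap=no; hdf=no; NetCDF=no; Pyfort=no; XGKS=no; Pmw=no
        gplot=no; gifsicle=no; netpbm=no; pbmplus=no; gifmerge=no
        ghostscript=no; freetype=no; numpy=no; scipy=no; ipython=no
        cairo=no; ffmpeg=no; blas=no; lapack=no; lapack95=no
    }

    if [ "$MYOPT" = "--cairo-only" ]; then
        all_off
        cairo=yes
    fi
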
-
-## Make sure we don't build NetCDF if opendap is there...
-if [ "$dap" = "yes" ]; then
-    NetCDF=no
-fi
-
-d=`pwd`
-echo "Building external software that CDAT requires."
-echo "See $d/build for logs of the build."
-echo "Any package can be excluded from the build by passing --disable-PACKAGE"
-echo "To build a single package, pass --PACKAGE-only"
-echo "Packages are: netcdf, netcdf-fortran, opendap, pyfort, numeric, xgks, pmw, gplot, gifsicle,"
-echo "              netpbm, pbmplus, gifmerge, r, ghostscript, ioapi, hdf4, freetype, cairo"
-echo "Note R is not built by default: Pass --enable-r to build R library (Linux only)."
-#echo "Note VTK is not built by default: Pass --enable-vtk to build (linux only)."
-echo "opendap MIGHT work on Solaris but probably won't; if it fails, try building it separately"
-echo "Warning: errors in these builds are expected."
-
-
-#(./prolog.sh $s) || (echo "Unpack of tar files failed."; exit 1)
-mkdir -p build
-if [ "$Pyfort" = "yes" ]; then
-    echo "Building Pyfort (Fortran/C interface)"
-    (./pyfort.sh $s 2>&1 | tee build/pyfort.LOG > ../logs/pyfort.LOG) ||  (echo "Build of Pyfort failed."; exit 1)
-fi
-if [ "$ghostscript" = "yes" ]; then
-    echo "Building Ghostscript"
-    (./ghostscript.sh $s 2>&1 | tee build/ghostscript.LOG > ../logs/ghostscript.LOG) ||  (echo "Build of ghostscript failed."; exit 1)
-fi
-if [ "$ffmpeg" = "yes" ]; then
-    echo "Building ffmpeg"
-    (./ffmpeg.sh $s 2>&1 | tee build/ffmpeg.LOG > ../logs/ffmpeg.LOG) ||  (echo "Build of ffmpeg failed."; exit 1)
-fi
-if [ "$freetype" = "yes" ]; then
-    echo "Building Freetype"
-    (./freetype.sh $s 2>&1 | tee build/freetype.LOG > ../logs/freetype.LOG) ||  (echo "Build of freetype failed."; exit 1)
-fi
-if [ "$cairo" = "yes" ]; then
-    echo "Building necessary libs for cairo"
-    echo "  Building xml"
-    (./libxml.sh $s 2>&1 | tee build/libxml.LOG > ../logs/libxml.LOG) ||  (echo "Build of libxml failed."; exit 1)
-    echo "  Building libpixman"
-    (./libpixman.sh $s 2>&1 | tee build/libpixman.LOG > ../logs/libpixman.LOG) ||  (echo "Build of libpixman failed."; exit 1)
-    echo "  Building libpng"
-    (./libpng.sh $s 2>&1 | tee build/libpng.LOG > ../logs/libpng.LOG) ||  (echo "Build of libpng failed."; exit 1)
-    echo "  Building pkgconfig"
-    (./pkgconfig.sh $s 2>&1 | tee build/pkgconfig.LOG > ../logs/pkgconfig.LOG) ||  (echo "Build of pkgconfig failed."; exit 1)
-    echo "  Building fontconfig"
-    (./fontconfig.sh $s 2>&1 | tee build/fontconfig.LOG > ../logs/fontconfig.LOG) ||  (echo "Build of fontconfig failed."; exit 1)
-    echo "Building Cairo"
-    (./cairo.sh $s 2>&1 | tee build/cairo.LOG > ../logs/cairo.LOG) ||  (echo "Build of cairo failed."; exit 1)
-fi
-if [ "$NetCDF" = "yes" ]; then
-    echo "Building netcdf without Fortran support"
-    (./netcdf.sh $s 2>&1 | tee build/netcdf.LOG > ../logs/netcdf.LOG ) || (echo "Build of netcdf without fortran failed."; exit 1)
-fi
-if [ "$hdf" = "yes" ]; then
-    echo "Building HDF"
-    echo "... Building JPEG library required by HDF"
-    (./jpeg.sh $s 2>&1 | tee build/jpeg.LOG > ../logs/jpeg.LOG ) || (echo "Build of jpeg failed."; exit 1)
-    echo "... Building HDF4 library"
-    (./hdf.sh $s 2>&1 | tee build/hdf.LOG > ../logs/hdf.LOG ) || (echo "Build of hdf failed."; exit 1)
-fi
-if [ "$ncfortran" = "yes" ]; then
-    echo "Building netcdf with Fortran support"
-    (./netcdf_fortran.sh $s 2>&1 | tee build/netcdf.LOG > ../logs/netcdf.LOG ) || (echo "Build of netcdf with fortran failed."; exit 1)
-fi
-if [ "$blas" = "yes" ]; then
-    echo "Building blas"
-    (./blas.sh $s 2>&1 | tee build/blas.LOG > ../logs/blas.LOG ) || (echo "Build of blas failed."; exit 1)
-fi
-if [ "$lapack" = "yes" ]; then
-    echo "Building lapack"
-    (./lapack.sh $s 2>&1 | tee build/lapack.LOG > ../logs/lapack.LOG ) || (echo "Build of lapack failed."; exit 1)
-fi
-if [ "$lapack95" = "yes" ]; then
-    echo "Building lapack95"
-    (./lapack95.sh $s 2>&1 | tee build/lapack95.LOG > ../logs/lapack95.LOG ) || (echo "Build of lapack95 failed."; exit 1)
-fi
-if [ "$numpy" = "yes" ]; then
-    if [ "$BLAS" = "" ]; then
-      BLAS=SETBLAS
-      export BLAS
-    fi
-    if [ "$LAPACK" = "" ]; then
-      LAPACK=SETLAPACK
-      export LAPACK
-    fi
-    echo "Building numpy" ${BLAS} ${LAPACK}
-    (./numpy.sh $s 2>&1 | tee build/numpy.LOG > ../logs/numpy.LOG ) || (echo "Build of numpy failed."; exit 1)
-fi
-if [ "$scipy" = "yes" ]; then
-    if [ "$BLAS" = "" ]; then
-      BLAS=SETBLAS
-      export BLAS
-    fi
-    if [ "$LAPACK" = "" ]; then
-      LAPACK=SETLAPACK
-      export LAPACK
-    fi
-    echo "Building scipy"
-    (./scipy.sh $s 2>&1 | tee build/scipy.LOG > ../logs/scipy.LOG ) || (echo "Build of scipy failed."; exit 1)
-fi
-if [ "$ipython" = "yes" ]; then
-    echo "Building ipython and its dependencies"
-    echo "... Building setuptools (with zope interface)"
-    (./setuptools.sh $s 2>&1 | tee build/setuptools.LOG > ../logs/setuptools.LOG ) || (echo "Build of setuptools failed."; exit 1)
-    echo "... Building Twisted (with zope interface)"
-    (./twisted.sh $s 2>&1 | tee build/twisted.LOG > ../logs/twisted.LOG ) || (echo "Build of Twisted failed."; exit 1)
-    echo "... Building ipython1"
-    (./ipython1.sh $s 2>&1 | tee build/ipython1.LOG > ../logs/ipython1.LOG ) || (echo "Build of ipython1 failed."; exit 1)
-    echo "... Building ipython"
-    (./ipython.sh $s 2>&1 | tee build/ipython.LOG > ../logs/ipython.LOG ) || (echo "Build of ipython failed."; exit 1)
-fi
-if [ "$ioapi" = "yes" ]; then
-    echo "Building IOAPI and its dependencies"
-    echo "... Building ioapi"
-    (./ioapi.sh $s 2>&1 | tee build/ioapi.LOG > ../logs/ioapi.LOG ) || (echo "Build of ioapi failed."; exit 1)
-    echo "... Building proj"
-    (./proj.sh $s 2>&1 | tee build/proj.LOG > ../logs/proj.LOG ) || (echo "Build of proj failed."; exit 1)
-    echo "... Building gdal"
-    (./gdal.sh $s 2>&1 | tee build/gdal.LOG > ../logs/gdal.LOG ) || (echo "Build of gdal failed."; exit 1)
-fi
-if [ "$XGKS" = "yes" ]; then
-    echo "Building xgks header files and fonts. (graphics display)"
-    (./xgks.sh $s 2>&1 | tee build/xgks.LOG > ../logs/xgks.LOG ) || (echo "Build of xgks failed."; exit 1)
-fi
-if [ "$Numeric" = "yes" ]; then
-    echo "Building Numeric (numerical operations, masked arrays, etc...)"
-    (./Numeric.sh $s 2>&1 | tee build/Numeric.LOG > ../logs/Numeric.LOG) || (echo "Build of Numeric failed."; exit 1)
-fi
-if [ "$dap" = "yes" ]; then
-    echo "Building opendap (client side only)"
-    echo "... Building curl required by opendap"
-    (./curl.sh $s 2>&1 | tee build/curl.LOG > ../logs/curl.LOG) || (echo "Build of curl failed";exit 1)
-    echo "... Building libxml required by opendap"
-    (./libxml.sh $s 2>&1 | tee build/libxml.LOG > ../logs/libxml.LOG) || (echo "Build of libxml failed";exit 1)
-    echo "... Building libdap required by opendap"
-    (./libdap.sh $s 2>&1 | tee build/libdap.LOG > ../logs/libdap.LOG) || (echo "Build of libdap failed";exit 1)
-    echo "... Building ncdap required by opendap, replaces standard netCDF libraries"
-    (./libnc-dap.sh $s 2>&1 | tee build/libnc-dap.LOG > ../logs/libnc-dap.LOG) || (echo "Build of libncdap failed";exit 1)
-fi
-if [ "$Pmw" = "yes" ]; then
-    echo "Building Pmw (Python Mega Widget, to design GUIs)"
-    (./Pmw.sh $s 2>&1 | tee build/Pmw.LOG > ../logs/Pmw.LOG) || (echo "Build of Pmw failed."; exit 1)
-fi
-if [ "$gplot" = "yes" ]; then
-    echo "Building gplot (postscript output)"
-    (./gplot.sh $s 2>&1 | tee build/gplot.LOG > ../logs/gplot.LOG) || (echo "Build of gplot failed, try manually."; exit 1)
-fi
-if [ "$gifsicle" = "yes" ]; then
-    echo "Building gifsicle (for animated GIF output)"
-    (./gifsicle.sh $s 2>&1 | tee build/gifsicle.LOG > ../logs/gifsicle.LOG) || (echo "Build of gifsicle failed."; exit 1)
-fi
-if [ "$netpbm" = "yes" ]; then
-    echo "Building netpbm (for GIF output)"
-    (./netpbm.sh $s 2>&1 | tee build/netpbm.LOG > ../logs/netpbm.LOG) || (echo "Build of netpbm failed."; exit 1)
-fi
-if [ "$pbmplus" = "yes" ]; then
-    echo "Building pbmplus (for GIF output)"
-    (./pbmplus.sh $s 2>&1 | tee build/pbmplus.LOG > ../logs/pbmplus.LOG) || (echo "Build of pbmplus failed."; exit 1)
-fi
-if [ "$gifmerge" = "yes" ]; then
-    echo "Building gifmerge (for GIF output)"
-    (./gifmerge.sh $s 2>&1 | tee build/gifmerge.LOG > ../logs/gifmerge.LOG) || (echo "Build of gifmerge failed."; exit 1)
-fi
-if [ "$R" = "yes" ]; then
-    echo "Building R statistical library"
-    (./R.sh $s 2>&1 | tee build/R.LOG > ../logs/R.LOG) || (echo "Build of R failed";exit 1)
-fi
-#if [ "$VTK" = "yes" ]; then
-#    echo "Building cmake (required by VTK)"
-#    (./cmake.sh $s 2>&1 | tee build/cmake.LOG > ../logs/cmake.LOG) || (echo "Build of cmake failed";exit 1)
-#    echo "Building VTK"
-#    (./vtk.sh $s 2>&1 | tee build/VTK.LOG > ../logs/VTK.LOG) || (echo "Build of VTK failed";exit 1)
-#fi
-echo "Done with building the external software."
diff --git a/exsrc/ioapi.sh b/exsrc/ioapi.sh
deleted file mode 100755
index a2f973a9024fa3ea2de3cfea386e9483852901a1..0000000000000000000000000000000000000000
--- a/exsrc/ioapi.sh
+++ /dev/null
@@ -1,22 +0,0 @@
-#!/bin/sh
-BUILD=`pwd`
-export BUILD
-PACKAGE="ioapi"
-. ./prolog.sh
-FC=`${prefix}/${version}/bin/python ${BUILD}/detect_fortran.py`
-export FC
-(cd ioapi*/ioapi; \
-    # build the library
-    make -f Makefile.nocpl; \
-    # go to the object/lib directory
-    # and run ranlib (only needed for Darwin)
-    # but doesn't affect the build
-    cd ../neutral_g77; \
-    ranlib libioapi.a; \
-
-    # copy the library to pyIoapi contrib package
-    # and the installation directory (prefix)
-#    echo "Copying IOAPI library to pyIoapi package" ; \
-#    cp libioapi.a ../../../../contrib/pyIoapi/Src/lib_external; \
-    cp libioapi.a ${prefix}/Externals/lib;
-)
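
detect_fortran.py itself is not part of this diff; judging from how its output is consumed (FC=gfortran, FC=g77, and so on), a rough shell equivalent of the probe might look like this, with the compiler list being an assumption rather than the script's actual order:

    # Pick the first Fortran compiler found on PATH.
    for fc in gfortran g95 g77 pgf90 ifort; do
        if command -v "$fc" > /dev/null 2>&1; then
            FC=$fc
            break
        fi
    done
    echo "${FC:?no Fortran compiler found}"
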
diff --git a/exsrc/ipython.sh b/exsrc/ipython.sh
deleted file mode 100755
index 66166ce8acf506ba8c4694041c5c91e0cf868d80..0000000000000000000000000000000000000000
--- a/exsrc/ipython.sh
+++ /dev/null
@@ -1,5 +0,0 @@
-#!/bin/sh
-PACKAGE="ipython"
-. ./prolog.sh 
-# ipython.
-(cd ipython-* ; ${prefix}/${version}/bin/python setup.py build ${D} install)
diff --git a/exsrc/ipython1.sh b/exsrc/ipython1.sh
deleted file mode 100755
index db6b6e84e41f7ecec0fdd5b1b37951dc04105a01..0000000000000000000000000000000000000000
--- a/exsrc/ipython1.sh
+++ /dev/null
@@ -1,6 +0,0 @@
-#!/bin/sh
-PACKAGE="ipython1"
-. ./prolog.sh 
-# ipython1.
-(cd ipython1*; ${prefix}/${version}/bin/python setup.py build ${D} install)
-
diff --git a/exsrc/jpeg.sh b/exsrc/jpeg.sh
deleted file mode 100755
index 206570e3acf8c23a453075d89a5f5376ac01732e..0000000000000000000000000000000000000000
--- a/exsrc/jpeg.sh
+++ /dev/null
@@ -1,12 +0,0 @@
-#!/bin/sh
-PACKAGE="jpeg"
-. ./prolog.sh
-CDMSARCH=`uname -m`
-if (test "${CDMSARCH}" = "ia64") then
-  export CC="gcc -fPIC"
-fi
-if (test "${CDMSARCH}" = "x86_64") then
-  export CC="gcc -fPIC"
-fi
-(mkdir ${prefix}/Externals/HDF ; mkdir ${prefix}/Externals/HDF/lib ; mkdir ${prefix}/Externals/HDF/include ; cd jpeg* ; ./configure --prefix=${prefix}/Externals/HDF ; make ; mv libjpeg.a ${prefix}/Externals/HDF/lib ; cp *.h ${prefix}/Externals/HDF/include )
-
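
The CDMSARCH test above reappears verbatim in libdap.sh, libnc-dap.sh, libpixman.sh, libpng.sh, libxml.sh, and pkgconfig.sh. A sketch of a shared fragment the scripts could source instead (pic_flags.sh is a hypothetical name):

    # pic_flags.sh -- force position-independent code on 64-bit targets.
    CDMSARCH=`uname -m`
    case "$CDMSARCH" in
        ia64|x86_64)
            CC="gcc -fPIC";  export CC
            CXX="g++ -fPIC"; export CXX   # only the C++ packages need this
            ;;
    esac
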
diff --git a/exsrc/lapack.sh b/exsrc/lapack.sh
deleted file mode 100755
index 73df47e3de4f98f73af121bfec29e78946259608..0000000000000000000000000000000000000000
--- a/exsrc/lapack.sh
+++ /dev/null
@@ -1,25 +0,0 @@
-#!/bin/sh
-PACKAGE="lapack-lite"
-BUILD=`pwd`
-export BUILD
-. ./prolog.sh
-
-FC=`${prefix}/${version}/bin/python ${BUILD}/detect_fortran.py`
-export FC
-if ( test $FC = "gfortran") then
-    CPPFLAGS="-DpgiFortran"; export CPPFLAGS
-fi
-if (test `uname` = "HP-UX") then
-    CPPFLAGS="+z -D_HPUX_SOURCE"; export CPPFLAGS
-elif (test `uname` = "Darwin") then
-(    CXX=""; export CXX \
-)
-fi
-
-cd lapack-lite*;\
-    # Add f77 support
-unset PGI; \
-    echo $FC ; \
-    env LOADER=${FC} FORTRAN=${FC} BLAS=${prefix}/Externals/libblas.a make; cp liblapack.a libtmglib.a ${prefix}/Externals/lib
-
diff --git a/exsrc/lapack95.sh b/exsrc/lapack95.sh
deleted file mode 100755
index b4344cd6d0ac3d3630550c0cefdd1fad90db45b4..0000000000000000000000000000000000000000
--- a/exsrc/lapack95.sh
+++ /dev/null
@@ -1,25 +0,0 @@
-#!/bin/sh
-PACKAGE="lapack95"
-BUILD=`pwd`
-export BUILD
-. ./prolog.sh
-
-FC=`${prefix}/${version}/bin/python ${BUILD}/detect_fortran.py`
-export FC
-if ( test $FC = "gfortran") then
-    CPPFLAGS="-DpgiFortran"; export CPPFLAGS
-fi
-if (test `uname` = "HP-UX") then
-    CPPFLAGS="+z -D_HPUX_SOURCE"; export CPPFLAGS
-elif (test `uname` = "Darwin") then
-(    CXX=""; export CXX \
-)
-fi
-
-cd lapack95*/SRC;\
-    # Add f77 support
-unset PGI; \
-    echo $FC ; \
-    env LAPACK_PATH=${prefix}/Externals/lib make; cp ../lapack95.a ${prefix}/Externals/lib/liblapack95.a; cp ../lapack95_modules/* ${prefix}/Externals/include
-
diff --git a/exsrc/libcf.sh b/exsrc/libcf.sh
deleted file mode 100755
index 5e0add5c34289a15b54078ea62541c9c692fdad2..0000000000000000000000000000000000000000
--- a/exsrc/libcf.sh
+++ /dev/null
@@ -1,20 +0,0 @@
-#!/bin/sh
-
-PACKAGE="libcf"
-BUILD=`pwd`
-export BUILD
-. ./prolog.sh
-
-NC4LOC=`grep NC4LOC ../config.log | sed 's/NC4LOC=//' | sed "s/'//g"`
-HDF5LOC=`grep HDF5LOC ../config.log | sed 's/HDF5LOC=//' | sed "s/'//g"`
-
-echo "prefix is ${prefix}"
-echo "using netcdf at $NC4LOC, using hdf5 at $HDF5LOC"
-
-(cd libcf*; \
-  mkdir ${prefix}/Externals/libcf ; \
-  mkdir ${prefix}/Externals/NetCDF ; \
-  ./configure --prefix=${prefix}/Externals/NetCDF --with-netcdf=$NC4LOC --with-hdf5=$HDF5LOC --enable-shared; \
-  make; make install
-)
-
diff --git a/exsrc/libdap.sh b/exsrc/libdap.sh
deleted file mode 100755
index d79e566c8c7c05807bacb1dff1138c308240e903..0000000000000000000000000000000000000000
--- a/exsrc/libdap.sh
+++ /dev/null
@@ -1,14 +0,0 @@
-#!/bin/sh
-PACKAGE="libdap"
-. ./prolog.sh
-CDMSARCH=`uname -m`
-if (test "${CDMSARCH}" = "ia64") then
-  export CC="gcc -fPIC"
-  export CXX="g++ -fPIC"
-fi
-if (test "${CDMSARCH}" = "x86_64") then
-  export CC="gcc -fPIC"
-  export CXX="g++ -fPIC"
-fi
-(cd libdap* ; env PATH=${prefix}/Externals/OpenDAP/bin\:${PATH} ./configure --disable-shared --prefix=${prefix}/Externals/OpenDAP ; make ; make install )
-
diff --git a/exsrc/libnc-dap.sh b/exsrc/libnc-dap.sh
deleted file mode 100755
index de5bb66fc246bd403b65184a7483b07c7aa28ac2..0000000000000000000000000000000000000000
--- a/exsrc/libnc-dap.sh
+++ /dev/null
@@ -1,14 +0,0 @@
-#!/bin/sh
-PACKAGE="libnc-dap"
-. ./prolog.sh
-CDMSARCH=`uname -m`
-if (test "${CDMSARCH}" = "ia64") then
-  export CC="gcc -fPIC"
-  export CXX="g++ -fPIC"
-fi
-if (test "${CDMSARCH}" = "x86_64") then
-  export CC="gcc -fPIC"
-  export CXX="g++ -fPIC"
-fi
-(cd libnc-dap* ; env PATH=${prefix}/Externals/OpenDAP/bin\:${PATH} ./configure --disable-shared --prefix=${prefix}/Externals/OpenDAP ; make ; make install ; cp -pf ${prefix}/Externals/OpenDAP/bin/* ${prefix}/Externals/bin )
-
diff --git a/exsrc/libpixman.sh b/exsrc/libpixman.sh
deleted file mode 100755
index 2b8c09e00ac48ba20770f217b8d9e0e41a59ce9d..0000000000000000000000000000000000000000
--- a/exsrc/libpixman.sh
+++ /dev/null
@@ -1,12 +0,0 @@
-#!/bin/sh
-PACKAGE="pixman"
-. ./prolog.sh
-CDMSARCH=`uname -m`
-if (test "${CDMSARCH}" = "ia64") then
-  export CC="gcc -fPIC"
-fi
-if (test "${CDMSARCH}" = "x86_64") then
-  export CC="gcc -fPIC"
-fi
-(cd pixman* ; ./configure --prefix=${prefix}/Externals ; make ; make install )
-
diff --git a/exsrc/libpng.sh b/exsrc/libpng.sh
deleted file mode 100755
index 2cb505cc3c6c103a298bc721b27e90d18f28ae30..0000000000000000000000000000000000000000
--- a/exsrc/libpng.sh
+++ /dev/null
@@ -1,12 +0,0 @@
-#!/bin/sh
-PACKAGE="libpng"
-. ./prolog.sh
-CDMSARCH=`uname -m`
-if (test "${CDMSARCH}" = "ia64") then
-  export CC="gcc -fPIC"
-fi
-if (test "${CDMSARCH}" = "x86_64") then
-  export CC="gcc -fPIC"
-fi
-(cd libpng* ; ./configure --prefix=${prefix}/Externals ; make ; make install )
-
diff --git a/exsrc/libxml.sh b/exsrc/libxml.sh
deleted file mode 100755
index de23dc8cb46ec887a00071b0bfcafb95beb2d43f..0000000000000000000000000000000000000000
--- a/exsrc/libxml.sh
+++ /dev/null
@@ -1,12 +0,0 @@
-#!/bin/sh
-PACKAGE="libxml"
-. ./prolog.sh
-CDMSARCH=`uname -m`
-if (test "${CDMSARCH}" = "ia64") then
-  export CC="gcc -fPIC"
-fi
-if (test "${CDMSARCH}" = "x86_64") then
-  export CC="gcc -fPIC"
-fi
-(cd libxml2* ; ./configure --prefix=${prefix}/Externals ; make ; make install )
-
diff --git a/exsrc/netcdf.sh b/exsrc/netcdf.sh
deleted file mode 100755
index 6222460fd5d86d715d92f788537df204c41b43f5..0000000000000000000000000000000000000000
--- a/exsrc/netcdf.sh
+++ /dev/null
@@ -1,50 +0,0 @@
-#!/bin/sh
-PACKAGE="netcdf"
-. ./prolog.sh
-if (test `uname` = "HP-UX") then
-    CPPFLAGS="+z -D_HPUX_SOURCE"; export CPPFLAGS
-elif (test `uname` = "Darwin") then
-    CXX=""; export CXX
-fi
-
-echo "prefix is ${prefix}"
-# Define compilation flags for itanium based NEC TX-7 (and gcc) -> ia64
-# Also define compilation flags for SGI Altrix (and gcc) -> ia64
-# Same for AMD Opteron based HP Proliant DL585                  -> x86_64
-# export CFLAGS="$CFLAGS -fpic -O"
-CDMSARCH=`uname -m`
-if (test "${CDMSARCH}" = "ia64") then
-    export CFLAGS="$CFLAGS -fPIC"
-fi
-if (test "${CDMSARCH}" = "x86_64") then
-    export CFLAGS="$CFLAGS -fPIC"
-fi
-
-if (test `uname ` = "CYGWIN_NT-5.1") then
-(cd netcdf*; \
-    FC=''; export FC; \
-    F90='';export F90; \
-    unset PGI; \
-    mkdir ${prefix}/Externals/NetCDF ; \
-    ./configure --build=i686-pc-linux-gnu --prefix=${prefix}/Externals/NetCDF; \
-    make; make install
-)
-elif (test `uname ` = "CYGWIN_NT-6.0") then
-(cd netcdf*; \
-    FC=''; export FC; \
-    F90='';export F90; \
-    unset PGI; \
-    mkdir ${prefix}/Externals/NetCDF ; \
-    ./configure --build=i686-pc-linux-gnu --prefix=${prefix}/Externals/NetCDF; \
-    make; make install
-)
-else
-(cd netcdf*; \
-    FC=''; export FC; \
-    F90='';export F90; \
-    unset PGI; \
-    mkdir ${prefix}/Externals/NetCDF ; \
-    ./configure --prefix=${prefix}/Externals/NetCDF; \
-    make; make install
-)
-fi
diff --git a/exsrc/netcdf_fortran.sh b/exsrc/netcdf_fortran.sh
deleted file mode 100755
index bbf4c9886527c755e9ad8e46ebd6d552f45a4982..0000000000000000000000000000000000000000
--- a/exsrc/netcdf_fortran.sh
+++ /dev/null
@@ -1,42 +0,0 @@
-#!/bin/sh
-PACKAGE="netcdf"
-BUILD=`pwd`
-export BUILD
-. ./prolog.sh
-
-FC=`${prefix}/${version}/bin/python ${BUILD}/detect_fortran.py`
-export FC
-if ( test $FC = "gfortran") then
-    CPPFLAGS="-DpgiFortran"; export CPPFLAGS
-fi
-if (test `uname` = "HP-UX") then
-    CPPFLAGS="+z -D_HPUX_SOURCE"; export CPPFLAGS
-elif (test `uname` = "Darwin") then
-(    CXX=""; export CXX \
-)
-fi
-
-if (test `uname ` = "CYGWIN_NT-5.1") then 
-(cd netcdf*; \
-    unset PGI; \
-    mkdir ${prefix}/Externals/NetCDF; \
-    ./configure --build=i686-pc-linux-gnu --prefix=${prefix}/Externals/NetCDF; \
-    make; make install
-)
-elif (test `uname ` = "CYGWIN_NT-6.0") then 
-(cd netcdf*; \
-    unset PGI; \
-    mkdir ${prefix}/Externals/NetCDF; \
-    ./configure --build=i686-pc-linux-gnu --prefix=${prefix}/Externals/NetCDF; \
-    make; make install
-)
-else
-(cd netcdf*;\
-    # Add f77 support
-    unset PGI; \
-    mkdir ${prefix}/Externals/NetCDF; \
-    ./configure --prefix=${prefix}/Externals/NetCDF; \
-    make; make install; \
-)
-fi
-
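
netcdf.sh earlier and netcdf_fortran.sh above both carry two identical Cygwin branches (CYGWIN_NT-5.1 and CYGWIN_NT-6.0). A glob in a case statement collapses them; the sketch assumes no other CYGWIN_NT-* release needs different handling:

    case `uname` in
        CYGWIN_NT-*) EXTRA_CONF="--build=i686-pc-linux-gnu" ;;
        *)           EXTRA_CONF="" ;;
    esac
    (cd netcdf*; unset PGI; \
        mkdir -p ${prefix}/Externals/NetCDF; \
        ./configure $EXTRA_CONF --prefix=${prefix}/Externals/NetCDF; \
        make; make install
    )
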
diff --git a/exsrc/netpbm.input.conf b/exsrc/netpbm.input.conf
deleted file mode 100644
index a7f73f85fea7b13f38bfc8ab2bcb2dd244d67542..0000000000000000000000000000000000000000
--- a/exsrc/netpbm.input.conf
+++ /dev/null
@@ -1,19 +0,0 @@
-
-
-
-static
-
-none
-none
-none
-none
-
-
-
-
-
-
-
-
-
-
diff --git a/exsrc/netpbm.input.conf.Cygwin b/exsrc/netpbm.input.conf.Cygwin
deleted file mode 100644
index 5bd669a10cf3253edfe2b126bf57526ce6241384..0000000000000000000000000000000000000000
--- a/exsrc/netpbm.input.conf.Cygwin
+++ /dev/null
@@ -1,18 +0,0 @@
-
-gnu
-
-static
-
-none
-none
-none
-
-
-
-
-
-
-
-
-
-
diff --git a/exsrc/netpbm.input.conf.Darwin b/exsrc/netpbm.input.conf.Darwin
deleted file mode 100644
index 81ee298864fd2f7c25e278d7e47b73807310d6ba..0000000000000000000000000000000000000000
--- a/exsrc/netpbm.input.conf.Darwin
+++ /dev/null
@@ -1,19 +0,0 @@
-
-
-
-
-static
-
-none
-none
-none
-none
-
-
-
-
-
-
-
-
-
diff --git a/exsrc/netpbm.input.conf.sun b/exsrc/netpbm.input.conf.sun
deleted file mode 100644
index ae45aa38cba3a35f3ba26a7b3ac6c569b95e4db0..0000000000000000000000000000000000000000
--- a/exsrc/netpbm.input.conf.sun
+++ /dev/null
@@ -1,20 +0,0 @@
-
-
-cc
-sun
-
-
-static
-none
-none
-none
-
-
-
-
-
-
-
-
-
-
diff --git a/exsrc/netpbm.input.inst b/exsrc/netpbm.input.inst
deleted file mode 100644
index c9167ec1e6cad19b28584602ea2edc3e33aefb23..0000000000000000000000000000000000000000
--- a/exsrc/netpbm.input.inst
+++ /dev/null
@@ -1,9 +0,0 @@
-INST_PREFIX
-CDAT_PREFIX
-
-
-
-
-
-N
-
diff --git a/exsrc/netpbm.sh b/exsrc/netpbm.sh
deleted file mode 100755
index 1e5d07180630feb04876c24aabc56e89c4b661e0..0000000000000000000000000000000000000000
--- a/exsrc/netpbm.sh
+++ /dev/null
@@ -1,35 +0,0 @@
-#!/bin/sh
-PACKAGE="netpbm"
-OS=`uname`
-if ( test "${OS}" = 'Darwin' ) then
-    echo "Darwin" ;
-    CONF_FILE=netpbm.input.conf.Darwin;
-elif ( test "${OS}" = 'SunOS' ) then
-    echo "Sun OS";
-    CONF_FILE=netpbm.input.conf.sun;
-elif ( test "${OS}" = 'Linux' ) then
-    echo "GNU Linux";
-    CONF_FILE=netpbm.input.conf;
-elif ( test "${OS}" = 'CYGWIN_NT-5.1' ) then
-    echo "GNU Build for Cygwin";
-    CONF_FILE=netpbm.input.conf.Cygwin;
-elif ( test "${OS}" = 'CYGWIN_NT-6.0' ) then
-    echo "GNU Build for Cygwin";
-    CONF_FILE=netpbm.input.conf.Cygwin;
-else
-    echo "Platform not tested, using GNU conf file";
-    echo "If it hangs or fails, try building manually or use pbmplus";
-    CONF_FILE=netpbm.input.conf;
-fi
-. ./prolog.sh
-(  
-   cd netpbm*; \
-   BUILD_DIR=`pwd`;\
-   sed -e 's@CDAT_PREFIX@'${prefix}'/Externals@g' \
-       -e 's@INST_PREFIX@'${BUILD_DIR}'/TMP@g' \
-        ../../netpbm.input.inst > netpbm.input.inst.feed ; \
-   ./configure < ../../${CONF_FILE} ; \
-   make ; \
-   make package pkgdir=${BUILD_DIR}/TMP; \
-   ./installnetpbm < netpbm.input.inst.feed ; \
-   rm -rf  ${BUILD_DIR}/TMP 
-)
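
netpbm's configure and installnetpbm are interactive, so the script drives them from canned answer files: the configure answers come straight from one of the netpbm.input.conf variants, and the install answers are produced by the sed call, which rewrites the INST_PREFIX and CDAT_PREFIX placeholder tokens in netpbm.input.inst. For example (the paths are illustrative, not from the script):

    prefix=/usr/local/cdat
    BUILD_DIR=/tmp/netpbm-build
    sed -e 's@CDAT_PREFIX@'${prefix}'/Externals@g' \
        -e 's@INST_PREFIX@'${BUILD_DIR}'/TMP@g' netpbm.input.inst
    # the two placeholder lines become:
    #   /tmp/netpbm-build/TMP
    #   /usr/local/cdat/Externals
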
diff --git a/exsrc/numpy.sh b/exsrc/numpy.sh
deleted file mode 100755
index a1560bcbb3f30b1af01b0566b8278e645e6d97d6..0000000000000000000000000000000000000000
--- a/exsrc/numpy.sh
+++ /dev/null
@@ -1,30 +0,0 @@
-#!/bin/sh
-PACKAGE="numpy"
-. ./prolog.sh 
-# Handle x86_64 arch
-CDATARCH=`uname -m`
-if (test "${CDATARCH}" = "x86_64") then
-  cd numpy-*
-  cat >site.cfg <<EOF
-# Defaults
-#  ========
-# The settings given here will apply to all other sections if not overridden.
-# This is a good place to add general library and include directories like
-# /usr/local/{lib,include}
-#
-[DEFAULT]
-library_dirs = /usr/lib64
-EOF
-  cd ..
-fi
-if (test "${BLAS}" = "SETBLAS") then
-BLAS=${prefix}/Externals/lib/libblas.a
-export BLAS
-fi
-if (test "${LAPACK}" = "SETLAPACK") then
-LAPACK=${prefix}/Externals/lib/liblapack.a
-export LAPACK
-fi
-# Numpy.
-(cd numpy-*; ${prefix}/${version}/bin/python setup.py build ${D} install)
-
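
The SETBLAS/SETLAPACK strings are sentinels: the top-level driver exports them when the user has not pointed BLAS and LAPACK at system libraries, and numpy.sh (like scipy.sh) swaps them for the static libraries built earlier in the same run. Invoked directly, the same effect would be (the prefix is an example value):

    # Let numpy.sh resolve the sentinels to the freshly built libraries:
    #   BLAS   -> /usr/local/cdat/Externals/lib/libblas.a
    #   LAPACK -> /usr/local/cdat/Externals/lib/liblapack.a
    BLAS=SETBLAS LAPACK=SETLAPACK ./numpy.sh /usr/local/cdat
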
diff --git a/exsrc/pbmplus.sh b/exsrc/pbmplus.sh
deleted file mode 100755
index 03f48e0433fc149ee8983788740ef8a47f2561ea..0000000000000000000000000000000000000000
--- a/exsrc/pbmplus.sh
+++ /dev/null
@@ -1,9 +0,0 @@
-#!/bin/sh
-PACKAGE="pbmplus"
-. ./prolog.sh
-d=`uname`
-if ( test "Linux" = "${d}" ) then
-( INSTALLBINARIES=${prefix}/Externals/bin; export INSTALLBINARIES; INSTALLMANUALS=${prefix}/Externals/man/mann ; export INSTALLMANUALS ; CC="gcc -ansi" ; export CC;cd pbmplus; make install )
-else
-( INSTALLBINARIES=${prefix}/Externals/bin; export INSTALLBINARIES ; INSTALLMANUALS=${prefix}/Externals/man/mann ; export INSTALLMANUALS ; cd pbmplus; make install )
-fi
diff --git a/exsrc/pixman.def b/exsrc/pixman.def
deleted file mode 100644
index 4b69464c5f88d4cb7b096a479014a55ed04fe03b..0000000000000000000000000000000000000000
--- a/exsrc/pixman.def
+++ /dev/null
@@ -1,62 +0,0 @@
-EXPORTS
-pixman_region_set_static_pointers
-pixman_region_init
-pixman_region_init_rect
-pixman_region_init_with_extents
-pixman_region_fini
-pixman_region_translate
-pixman_region_copy
-pixman_region_intersect
-pixman_region_union
-pixman_region_union_rect
-pixman_region_subtract
-pixman_region_inverse
-pixman_region_contains_point
-pixman_region_contains_rectangle
-pixman_region_not_empty
-pixman_region_extents
-pixman_region_n_rects
-pixman_region_rectangles
-pixman_region_equal
-pixman_region_selfcheck
-pixman_region_reset
-pixman_region_init_rects
-pixman_blt
-pixman_fill
-pixman_image_create_solid_fill
-pixman_image_create_linear_gradient
-pixman_image_create_radial_gradient
-pixman_image_create_conical_gradient
-pixman_image_create_bits
-pixman_image_ref
-pixman_image_unref
-pixman_image_set_clip_region
-pixman_image_set_has_client_clip
-pixman_image_set_transform
-pixman_image_set_repeat
-pixman_image_set_filter
-pixman_image_set_source_clipping
-pixman_image_set_alpha_map
-pixman_image_set_component_alpha
-pixman_image_set_accessors
-pixman_image_set_indexed
-pixman_image_get_data
-pixman_image_get_width
-pixman_image_get_height
-pixman_image_get_stride
-pixman_image_get_depth
-pixman_image_fill_rectangles
-pixman_compute_composite_region
-pixman_image_composite
-pixman_sample_ceil_y
-pixman_sample_floor_y
-pixman_edge_step
-pixman_edge_init
-pixman_line_fixed_edge_init
-pixman_rasterize_edges
-pixman_add_traps
-pixman_add_trapezoids
-pixman_rasterize_trapezoid
-pixman_format_supported_destination
-pixman_transform_point_3d
-LIBRARY libpixman-1.dll
diff --git a/exsrc/pkgconfig.sh b/exsrc/pkgconfig.sh
deleted file mode 100755
index b72b9fedddc419825e0ea75af2900fb57dc93e67..0000000000000000000000000000000000000000
--- a/exsrc/pkgconfig.sh
+++ /dev/null
@@ -1,12 +0,0 @@
-#!/bin/sh
-PACKAGE="pkgconfig"
-. ./prolog.sh
-CDMSARCH=`uname -m`
-if (test "${CDMSARCH}" = "ia64") then
-  export CC="gcc -fPIC"
-fi
-if (test "${CDMSARCH}" = "x86_64") then
-  export CC="gcc -fPIC"
-fi
-(cd pkgconfig-* ; ./configure --prefix=${prefix}/Externals ; make ; make install )
-
diff --git a/exsrc/proj.sh b/exsrc/proj.sh
deleted file mode 100755
index 108cce340c90c28f89dd887463b87bed9611d171..0000000000000000000000000000000000000000
--- a/exsrc/proj.sh
+++ /dev/null
@@ -1,5 +0,0 @@
-#!/bin/sh
-PACKAGE="proj"
-. ./prolog.sh
-(cd proj* ; ./configure --prefix=${prefix}/Externals ; make ; make install )
-
diff --git a/exsrc/prolog.sh b/exsrc/prolog.sh
deleted file mode 100755
index 9d0e7b6d6383cd5f510c9211e0c912cf08308ad8..0000000000000000000000000000000000000000
--- a/exsrc/prolog.sh
+++ /dev/null
@@ -1,76 +0,0 @@
-#!/bin/sh
-# The PYTHON* unsets below were once done "out of superstition";
-# they are now deliberately left commented out.
-#unset PYTHONPATH
-#unset PYTHONSTARTUP
-#unset PYTHONHOME
-
-if (test "$1" = "--debug") then
-    D="--debug";
-    OPT=-g; 
-    shift
-else
-    D="";
-    OPT=${OPT:=-O}
-fi
-export OPT
-
-OS=`uname`
-if (test -z "$1") then
-    echo "Usage: $0 prefix";
-    exit 1
-fi
-version=`more ../version`
-
-if (test ! -d $1) then
-    echo -n "$1 is not a directory; create it? (y/[n])";
-    y='n'
-    read y;
-    if (test "${y}" = 'y') then
-        mkdir -p $1/${version}/bin; mkdir $1/${version}/lib; mkdir $1/${version}/include ; mkdir -p $1/Externals/bin ; mkdir $1/Externals/lib ; mkdir $1/Externals/share ; mkdir $1/Externals/include
-        if (test ! -d $1) then
-            echo "Could not create $1, installation aborted.";
-            exit 1
-        fi
-    else
-        echo 'Installation aborted.';
-        exit 1
-    fi
-fi
-prefix=`(cd $1;pwd)`
-
-cp -f detect_fortran.py ${prefix}/${version}/bin/detect_fortran.py;
-chmod +x  ${prefix}/${version}/bin/detect_fortran.py;
-
-#mkdir -p build
-/bin/cp src/${PACKAGE}*gz build
-cd build
-chmod +w ${PACKAGE}*gz
-echo "untarring ${PACKAGE}"
-
-for x in ${PACKAGE}*gz;
-    do
-        echo $x;
-        echo `basename $x .gz`;
-	gunzip -f $x;
-	tar xf `basename $x .gz`;
-	/bin/rm -f `basename $x .gz`;
-    done
-
-
-#if (test ! -d build) then
-#    # Unpack everything into build
-#    mkdir -p build
-#    /bin/cp src/*.gz build
-#    cd build
-#    chmod +w *.gz 
-#    for x in *.gz; 
-#    do 
-#        echo "$x"; 
-#        gunzip -f $x;
-#        tar xf `basename $x .gz`;
-#        /bin/rm -f `basename $x .gz`
-#    done
-#    cd ..
-#fi
-echo "Installation of ${PACKAGE} to ${prefix}"
diff --git a/exsrc/pyfort.sh b/exsrc/pyfort.sh
deleted file mode 100755
index 361e67ea01f49d382fdc431d2950b034e98101a4..0000000000000000000000000000000000000000
--- a/exsrc/pyfort.sh
+++ /dev/null
@@ -1,4 +0,0 @@
-#!/bin/sh
-PACKAGE="Pyfort"
-. ./prolog.sh
-(cd Pyfort*; ${prefix}/${version}/bin/python setup.py build ${D} install )
diff --git a/exsrc/setuptools.sh b/exsrc/setuptools.sh
deleted file mode 100755
index 5082091d3e1587f0e20123a6215a9d84c966bb78..0000000000000000000000000000000000000000
--- a/exsrc/setuptools.sh
+++ /dev/null
@@ -1,6 +0,0 @@
-#!/bin/sh
-PACKAGE="setuptools"
-. ./prolog.sh 
-# Twisted.
-(cd setuptools-* ; ${prefix}/${version}/bin/python setup.py build ${D} install)
-
diff --git a/exsrc/src/CMakeCache.txt.linux.in b/exsrc/src/CMakeCache.txt.linux.in
deleted file mode 100644
index 15a056b2839e0f6aae09bbbe48fba73c36ee2489..0000000000000000000000000000000000000000
--- a/exsrc/src/CMakeCache.txt.linux.in
+++ /dev/null
@@ -1,1965 +0,0 @@
-# This is the CMakeCache file.
-# For build in directory: @BUILD_DIR@/VTK-build
-# It was generated by CMake: cmake
-# You can edit this file to change values found and used by cmake.
-# If you do not want to change any of the values, simply exit the editor.
-# If you do want to change a value, simply edit, save, and exit the editor.
-# The syntax for the file is as follows:
-# KEY:TYPE=VALUE
-# KEY is the name of a variable in the cache.
-# TYPE is a hint to GUI's for the type of VALUE, DO NOT EDIT TYPE!.
-# VALUE is the current value for the KEY.
-
-########################
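
Despite the generated-by-CMake header, this file is a template: the @BUILD_DIR@, @PREFIX_PATH@, @PYVER@, @EXTERNALS@ and @TCLTK_VERSION@ tokens below are substituted before CMake reads the cache. The substituting script is not part of this diff; a plausible instantiation pass (values illustrative, 8.4 taken from the tk8.4 path below) would be:

    sed -e "s|@BUILD_DIR@|${BUILD_DIR}|g" \
        -e "s|@PREFIX_PATH@|${prefix}|g" \
        -e "s|@PYVER@|2.5|g" \
        -e "s|@EXTERNALS@|${prefix}/Externals|g" \
        -e "s|@TCLTK_VERSION@|8.4|g" \
        CMakeCache.txt.linux.in > VTK-build/CMakeCache.txt
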
-# EXTERNAL cache entries
-########################
-
-//Build the documentation (Doxygen).
-BUILD_DOCUMENTATION:BOOL=OFF
-
-//Build VTK examples.
-BUILD_EXAMPLES:BOOL=ON
-
-//Build Verdict with shared libraries.
-BUILD_SHARED_LIBS:BOOL=ON
-
-//Build the testing tree.
-BUILD_TESTING:BOOL=OFF
-
-//Path to a program.
-BZRCOMMAND:FILEPATH=BZRCOMMAND-NOTFOUND
-
-//Path to a program.
-CMAKE_AR:FILEPATH=/usr/bin/ar
-
-//For backwards compatibility, what version of CMake commands and
-// syntax should this version of CMake try to support.
-CMAKE_BACKWARDS_COMPATIBILITY:STRING=2.4
-
-//Choose the type of build, options are: None(CMAKE_CXX_FLAGS or
-// CMAKE_C_FLAGS used) Debug Release RelWithDebInfo MinSizeRel.
-CMAKE_BUILD_TYPE:STRING=
-
-//Enable/Disable color output during build.
-CMAKE_COLOR_MAKEFILE:BOOL=ON
-
-//CXX compiler.
-CMAKE_CXX_COMPILER:FILEPATH=/usr/bin/c++
-
-//Flags used by the compiler during all build types.
-CMAKE_CXX_FLAGS:STRING=
-
-//Flags used by the compiler during debug builds.
-CMAKE_CXX_FLAGS_DEBUG:STRING=-g
-
-//Flags used by the compiler during release minsize builds.
-CMAKE_CXX_FLAGS_MINSIZEREL:STRING=-Os -DNDEBUG
-
-//Flags used by the compiler during release builds (/MD /Ob1 /Oi
-// /Ot /Oy /Gs will produce slightly less optimized but smaller
-// files).
-CMAKE_CXX_FLAGS_RELEASE:STRING=-O3 -DNDEBUG
-
-//Flags used by the compiler during Release with Debug Info builds.
-CMAKE_CXX_FLAGS_RELWITHDEBINFO:STRING=-O2 -g
-
-//C compiler.
-CMAKE_C_COMPILER:FILEPATH=/usr/bin/gcc
-
-//Flags used by the compiler during all build types.
-CMAKE_C_FLAGS:STRING=
-
-//Flags used by the compiler during debug builds.
-CMAKE_C_FLAGS_DEBUG:STRING=-g
-
-//Flags used by the compiler during release minsize builds.
-CMAKE_C_FLAGS_MINSIZEREL:STRING=-Os -DNDEBUG
-
-//Flags used by the compiler during release builds (/MD /Ob1 /Oi
-// /Ot /Oy /Gs will produce slightly less optimized but smaller
-// files).
-CMAKE_C_FLAGS_RELEASE:STRING=-O3 -DNDEBUG
-
-//Flags used by the compiler during Release with Debug Info builds.
-CMAKE_C_FLAGS_RELWITHDEBINFO:STRING=-O2 -g
-
-//Flags used by the linker.
-CMAKE_EXE_LINKER_FLAGS:STRING=
-
-//Flags used by the linker during debug builds.
-CMAKE_EXE_LINKER_FLAGS_DEBUG:STRING=
-
-//Flags used by the linker during release minsize builds.
-CMAKE_EXE_LINKER_FLAGS_MINSIZEREL:STRING=
-
-//Flags used by the linker during release builds.
-CMAKE_EXE_LINKER_FLAGS_RELEASE:STRING=
-
-//Flags used by the linker during Release with Debug Info builds.
-CMAKE_EXE_LINKER_FLAGS_RELWITHDEBINFO:STRING=
-
-//Use HP pthreads.
-CMAKE_HP_PTHREADS:BOOL=
-
-//Path to a program.
-CMAKE_INSTALL_NAME_TOOL:FILEPATH=/usr/bin/install_name_tool
-
-//Install path prefix, prepended onto install directories.
-CMAKE_INSTALL_PREFIX:PATH=@PREFIX_PATH@
-
-//Path to a program.
-CMAKE_LINKER:FILEPATH=/usr/bin/ld
-
-//Path to a program.
-CMAKE_MAKE_PROGRAM:FILEPATH=/usr/bin/make
-
-//Flags used by the linker during the creation of modules.
-CMAKE_MODULE_LINKER_FLAGS:STRING=
-
-//Flags used by the linker during debug builds.
-CMAKE_MODULE_LINKER_FLAGS_DEBUG:STRING=
-
-//Flags used by the linker during release minsize builds.
-CMAKE_MODULE_LINKER_FLAGS_MINSIZEREL:STRING=
-
-//Flags used by the linker during release builds.
-CMAKE_MODULE_LINKER_FLAGS_RELEASE:STRING=
-
-//Flags used by the linker during Release with Debug Info builds.
-CMAKE_MODULE_LINKER_FLAGS_RELWITHDEBINFO:STRING=
-
-//Path to a program.
-CMAKE_NM:FILEPATH=/usr/bin/nm
-
-//Path to a program.
-CMAKE_OBJCOPY:FILEPATH=CMAKE_OBJCOPY-NOTFOUND
-
-//Path to a program.
-CMAKE_OBJDUMP:FILEPATH=CMAKE_OBJDUMP-NOTFOUND
-
-//Build architectures for OSX
-CMAKE_OSX_ARCHITECTURES:STRING=
-
-//Minimum OS X version to target for deployment (at runtime); newer
-// APIs weak linked. Set to empty string for default value.
-CMAKE_OSX_DEPLOYMENT_TARGET:STRING=10.5
-
-//The product will be built against the headers and libraries located
-// inside the indicated SDK.
-CMAKE_OSX_SYSROOT:PATH=/Developer/SDKs/MacOSX10.5.sdk
-
-//Value Computed by CMake
-CMAKE_PROJECT_NAME:STATIC=VTK
-
-//Path to a program.
-CMAKE_RANLIB:FILEPATH=/usr/bin/ranlib
-
-//Flags used by the linker during the creation of dll's.
-CMAKE_SHARED_LINKER_FLAGS:STRING=
-
-//Flags used by the linker during debug builds.
-CMAKE_SHARED_LINKER_FLAGS_DEBUG:STRING=
-
-//Flags used by the linker during release minsize builds.
-CMAKE_SHARED_LINKER_FLAGS_MINSIZEREL:STRING=
-
-//Flags used by the linker during release builds.
-CMAKE_SHARED_LINKER_FLAGS_RELEASE:STRING=
-
-//Flags used by the linker during Release with Debug Info builds.
-CMAKE_SHARED_LINKER_FLAGS_RELWITHDEBINFO:STRING=
-
-//Path to a program.
-CMAKE_STRIP:FILEPATH=/usr/bin/strip
-
-//Thread library used.
-CMAKE_THREAD_LIBS:STRING=-lpthread
-
-//Use the pthreads library.
-CMAKE_USE_PTHREADS:BOOL=1
-
-//If true, cmake will use relative paths in makefiles and projects.
-CMAKE_USE_RELATIVE_PATHS:BOOL=OFF
-
-//Use sproc libs.
-CMAKE_USE_SPROC:BOOL=
-
-//Use the win32 thread library.
-CMAKE_USE_WIN32_THREADS:BOOL=
-
-//If this value is on, makefiles will be generated without the
-// .SILENT directive, and all commands will be echoed to the console
-// during the make.  This is useful for debugging only. With Visual
-// Studio IDE projects all commands are done without /nologo.
-CMAKE_VERBOSE_MAKEFILE:BOOL=FALSE
-
-//X11 extra flags.
-CMAKE_X_CFLAGS:STRING=
-
-//Libraries and options used in X11 programs.
-CMAKE_X_LIBS:STRING=/usr/X11R6/lib/libSM.dylib;/usr/X11R6/lib/libICE.dylib;/usr/X11R6/lib/libX11.dylib;/usr/X11R6/lib/libXext.dylib
-
-//Path to the coverage program that CTest uses for performing coverage
-// inspection
-COVERAGE_COMMAND:FILEPATH=/usr/bin/gcov
-
-//Path to a program.
-CVSCOMMAND:FILEPATH=/usr/bin/cvs
-
-//Options passed to the cvs update command.
-CVS_UPDATE_OPTIONS:STRING=-d -A -P
-
-//Maximum time allowed before CTest will kill the test.
-DART_TESTING_TIMEOUT:STRING=1500
-
-//Value Computed by CMake
-DICOMParser_BINARY_DIR:STATIC=@BUILD_DIR@/VTK-build/Utilities/DICOMParser
-
-//Value Computed by CMake
-DICOMParser_SOURCE_DIR:STATIC=@BUILD_DIR@/VTK/Utilities/DICOMParser
-
-//Path to a program.
-HGCOMMAND:FILEPATH=HGCOMMAND-NOTFOUND
-
-//Command used to build entire project from the command line.
-MAKECOMMAND:STRING=/usr/bin/make -i
-
-//Path to the memory checking command, used for memory error detection.
-MEMORYCHECK_COMMAND:FILEPATH=/usr/local/bin/valgrind
-
-//File that contains suppressions for the memory checker
-MEMORYCHECK_SUPPRESSIONS_FILE:FILEPATH=
-
-//Value Computed by CMake
-MaterialLibrary_BINARY_DIR:STATIC=@BUILD_DIR@/VTK-build/Utilities/MaterialLibrary
-
-//Value Computed by CMake
-MaterialLibrary_SOURCE_DIR:STATIC=@BUILD_DIR@/VTK/Utilities/MaterialLibrary
-
-//Include for OpenGL on OSX
-OPENGL_INCLUDE_DIR:PATH=/usr/X11R6/include
-
-//OpenGL lib for OSX
-OPENGL_gl_LIBRARY:FILEPATH=/usr/X11R6/lib/libGL.dylib
-
-//AGL lib for OSX
-OPENGL_glu_LIBRARY:FILEPATH=/usr/X11R6/lib/libGLU.dylib
-
-//Path to a file.
-OPENGL_xmesa_INCLUDE_DIR:PATH=/usr/X11R6/include
-
-//Does an external project define proj_list or should libproj4
-// define it?
-PROJ_LIST_EXTERNAL:BOOL=OFF
-
-//Should libproj4 include projection code that relies on GSL?
-PROJ_USE_GSL:BOOL=OFF
-
-//Should libproj4 be built as a thread-friendly library?
-PROJ_USE_PTHREADS:BOOL=OFF
-
-//Path to a library.
-PYTHON_DEBUG_LIBRARY:FILEPATH=/Library/Frameworks/python.framework
-
-//Add module vtkCommonPython
-PYTHON_ENABLE_MODULE_vtkCommonPython:BOOL=ON
-
-//Add module vtkFilteringPython
-PYTHON_ENABLE_MODULE_vtkFilteringPython:BOOL=ON
-
-//Add module vtkGenericFilteringPython
-PYTHON_ENABLE_MODULE_vtkGenericFilteringPython:BOOL=ON
-
-//Add module vtkGeovisPython
-PYTHON_ENABLE_MODULE_vtkGeovisPython:BOOL=ON
-
-//Add module vtkGraphicsPython
-PYTHON_ENABLE_MODULE_vtkGraphicsPython:BOOL=ON
-
-//Add module vtkHybridPython
-PYTHON_ENABLE_MODULE_vtkHybridPython:BOOL=ON
-
-//Add module vtkIOPython
-PYTHON_ENABLE_MODULE_vtkIOPython:BOOL=ON
-
-//Add module vtkImagingPython
-PYTHON_ENABLE_MODULE_vtkImagingPython:BOOL=ON
-
-//Add module vtkInfovisPython
-PYTHON_ENABLE_MODULE_vtkInfovisPython:BOOL=ON
-
-//Add module vtkRenderingPython
-PYTHON_ENABLE_MODULE_vtkRenderingPython:BOOL=ON
-
-//Add module vtkViewsPython
-PYTHON_ENABLE_MODULE_vtkViewsPython:BOOL=ON
-
-//Add module vtkVolumeRenderingPython
-PYTHON_ENABLE_MODULE_vtkVolumeRenderingPython:BOOL=ON
-
-//Add module vtkWidgetsPython
-PYTHON_ENABLE_MODULE_vtkWidgetsPython:BOOL=ON
-
-//Path to a program.
-PYTHON_EXECUTABLE:FILEPATH=@PREFIX_PATH@/bin/python@PYVER@
-
-//Extra libraries to link when linking to python (such as "z" for
-// zlib).  Separate multiple libraries with semicolons.
-PYTHON_EXTRA_LIBS:STRING=
-
-//Path to a file.
-PYTHON_INCLUDE_PATH:PATH=@PREFIX_PATH@/include/python@PYVER@
-
-//Path to a library.
-PYTHON_LIBRARY:FILEPATH=@PREFIX_PATH@/lib/libpython@PYVER@.dylib
-
-//Add module vtkCommonPython shared
-PYTHON_MODULE_vtkCommonPython_BUILD_SHARED:BOOL=ON
-
-//Add module vtkFilteringPython shared
-PYTHON_MODULE_vtkFilteringPython_BUILD_SHARED:BOOL=ON
-
-//Add module vtkGenericFilteringPython shared
-PYTHON_MODULE_vtkGenericFilteringPython_BUILD_SHARED:BOOL=ON
-
-//Add module vtkGeovisPython shared
-PYTHON_MODULE_vtkGeovisPython_BUILD_SHARED:BOOL=ON
-
-//Add module vtkGraphicsPython shared
-PYTHON_MODULE_vtkGraphicsPython_BUILD_SHARED:BOOL=ON
-
-//Add module vtkHybridPython shared
-PYTHON_MODULE_vtkHybridPython_BUILD_SHARED:BOOL=ON
-
-//Add module vtkIOPython shared
-PYTHON_MODULE_vtkIOPython_BUILD_SHARED:BOOL=ON
-
-//Add module vtkImagingPython shared
-PYTHON_MODULE_vtkImagingPython_BUILD_SHARED:BOOL=ON
-
-//Add module vtkInfovisPython shared
-PYTHON_MODULE_vtkInfovisPython_BUILD_SHARED:BOOL=ON
-
-//Add module vtkRenderingPython shared
-PYTHON_MODULE_vtkRenderingPython_BUILD_SHARED:BOOL=ON
-
-//Add module vtkViewsPython shared
-PYTHON_MODULE_vtkViewsPython_BUILD_SHARED:BOOL=ON
-
-//Add module vtkVolumeRenderingPython shared
-PYTHON_MODULE_vtkVolumeRenderingPython_BUILD_SHARED:BOOL=ON
-
-//Add module vtkWidgetsPython shared
-PYTHON_MODULE_vtkWidgetsPython_BUILD_SHARED:BOOL=ON
-
-//Utility library needed for vtkpython
-PYTHON_UTIL_LIBRARY:FILEPATH=/usr/lib/libutil.dylib
-
-//Path to scp command, used by CTest for submitting results to
-// a Dart server
-SCPCOMMAND:FILEPATH=/usr/bin/scp
-
-//Name of the computer/site where compile is being run
-SITE:STRING=omar
-
-//Path to the SLURM sbatch executable
-SLURM_SBATCH_COMMAND:FILEPATH=SLURM_SBATCH_COMMAND-NOTFOUND
-
-//Path to the SLURM srun executable
-SLURM_SRUN_COMMAND:FILEPATH=SLURM_SRUN_COMMAND-NOTFOUND
-
-//Path to a program.
-SVNCOMMAND:FILEPATH=svn
-
-//Path to a file.
-TCL_INCLUDE_PATH:PATH=@EXTERNALS@/include
-
-//Path to a library.
-TCL_LIBRARY:FILEPATH=@EXTERNALS@/lib/libtcl@TCLTK_VERSION@.dylib
-
-//Path to a program.
-TCL_TCLSH:FILEPATH=@EXTERNALS@/bin/tclsh@TCLTK_VERSION@
-
-//Path to a file.
-TK_INCLUDE_PATH:PATH=@EXTERNALS@/include
-
-//The path to the Tk internal headers (tkMacOSXDefault.h).
-TK_INTERNAL_PATH:PATH=@BUILD_DIR@/VTK/Utilities/TclTk/internals/tk8.4
-
-//Path to a library.
-TK_LIBRARY:FILEPATH=@EXTERNALS@/lib/libtk@TCLTK_VERSION@.dylib
-
-//Build the 2007 Verdict User Manual
-VERDICT_BUILD_DOC:BOOL=OFF
-
-//Should tests of the VERDICT library be built?
-VERDICT_ENABLE_TESTING:BOOL=OFF
-
-//Mangle verdict names for inclusion in a larger library?
-VERDICT_MANGLE:BOOL=ON
-
-//VTK requires the verdict prefix to be vtk
-VERDICT_MANGLE_PREFIX:STRING=vtk
-
-//VTK requires doubles
-VERDICT_USE_FLOAT:BOOL=OFF
-
-//Path to a library.
-VLI_LIBRARY_FOR_VP1000:FILEPATH=VLI_LIBRARY_FOR_VP1000-NOTFOUND
-
-//Value Computed by CMake
-VTKEXPAT_BINARY_DIR:STATIC=@BUILD_DIR@/VTK-build/Utilities/vtkexpat
-
-//Value Computed by CMake
-VTKEXPAT_SOURCE_DIR:STATIC=@BUILD_DIR@/VTK/Utilities/vtkexpat
-
-//Value Computed by CMake
-VTKFREETYPE_BINARY_DIR:STATIC=@BUILD_DIR@/VTK-build/Utilities/vtkfreetype
-
-//Value Computed by CMake
-VTKFREETYPE_SOURCE_DIR:STATIC=@BUILD_DIR@/VTK/Utilities/vtkfreetype
-
-//Value Computed by CMake
-VTKFTGL_BINARY_DIR:STATIC=@BUILD_DIR@/VTK-build/Utilities/ftgl
-
-//Value Computed by CMake
-VTKFTGL_SOURCE_DIR:STATIC=@BUILD_DIR@/VTK/Utilities/ftgl
-
-//Value Computed by CMake
-VTKJPEG_BINARY_DIR:STATIC=@BUILD_DIR@/VTK-build/Utilities/vtkjpeg
-
-//Value Computed by CMake
-VTKJPEG_SOURCE_DIR:STATIC=@BUILD_DIR@/VTK/Utilities/vtkjpeg
-
-//Value Computed by CMake
-VTKNETCDF_BINARY_DIR:STATIC=@BUILD_DIR@/VTK-build/Utilities/vtknetcdf
-
-//Value Computed by CMake
-VTKNETCDF_SOURCE_DIR:STATIC=@BUILD_DIR@/VTK/Utilities/vtknetcdf
-
-//Value Computed by CMake
-VTKPNG_BINARY_DIR:STATIC=@BUILD_DIR@/VTK-build/Utilities/vtkpng
-
-//Value Computed by CMake
-VTKPNG_SOURCE_DIR:STATIC=@BUILD_DIR@/VTK/Utilities/vtkpng
-
-//Value Computed by CMake
-VTKTIFF_BINARY_DIR:STATIC=@BUILD_DIR@/VTK-build/Utilities/vtktiff
-
-//Value Computed by CMake
-VTKTIFF_SOURCE_DIR:STATIC=@BUILD_DIR@/VTK/Utilities/vtktiff
-
-//Value Computed by CMake
-VTKZLIB_BINARY_DIR:STATIC=@BUILD_DIR@/VTK-build/Utilities/vtkzlib
-
-//Value Computed by CMake
-VTKZLIB_SOURCE_DIR:STATIC=@BUILD_DIR@/VTK/Utilities/vtkzlib
-
-//Value Computed by CMake
-VTK_BINARY_DIR:STATIC=@BUILD_DIR@/VTK-build
-
-//Path to a file.
-VTK_DATA_ROOT:PATH=@BUILD_DIR@/VTKData
-
-//Build leak checking support into VTK.
-VTK_DEBUG_LEAKS:BOOL=OFF
-
-//Location of the OpenGL extensions header file (glext.h).
-VTK_GLEXT_FILE:FILEPATH=@BUILD_DIR@/VTK/Utilities/ParseOGLExt/headers/glext.h
-
-//Location of the GLX extensions header file (glxext.h).
-VTK_GLXEXT_FILE:FILEPATH=@BUILD_DIR@/VTK/Utilities/ParseOGLExt/headers/glxext.h
-
-//Remove all legacy code completely.
-VTK_LEGACY_REMOVE:BOOL=OFF
-
-//Silence all legacy code messages.
-VTK_LEGACY_SILENT:BOOL=OFF
-
-//; separated directories to search for materials/shaders
-VTK_MATERIALS_DIRS:STRING=@BUILD_DIR@/VTK-build/Utilities/MaterialLibrary/Repository
-
-//Disable multithreading support in the Python bindings
-VTK_NO_PYTHON_THREADS:BOOL=OFF
-
-//The opengl library being used supports off screen Mesa calls.
-VTK_OPENGL_HAS_OSMESA:BOOL=OFF
-
-//Arguments passed to "python setup.py install ..." during installation.
-VTK_PYTHON_SETUP_ARGS:STRING=--prefix="${CMAKE_INSTALL_PREFIX}"
-
-//Value Computed by CMake
-VTK_SOURCE_DIR:STATIC=@BUILD_DIR@/VTK
-
-//VTK tests call vtkFloatingPointExceptions::Enable()
-VTK_TESTING_USE_FPE:BOOL=ON
-
-//Build VTK with 64 bit ids
-VTK_USE_64BIT_IDS:BOOL=OFF
-
-//Use Boost libraries for graph algorithms - www.boost.org.
-VTK_USE_BOOST:BOOL=OFF
-
-//Build classes using Carbon API.
-VTK_USE_CARBON:BOOL=OFF
-
-//Build pixel and vertex shader support for Cg.
-VTK_USE_CG_SHADERS:BOOL=OFF
-
-//Build classes using Cocoa API.
-VTK_USE_COCOA:BOOL=OFF
-
-//Turn this option off and tests and warning/error macros will
-// not popup windows
-VTK_USE_DISPLAY:BOOL=ON
-
-//If the FFMPEG library is available, should VTK use it for saving
-// .avi animation files?
-VTK_USE_FFMPEG_ENCODER:BOOL=OFF
-
-//Build the vtkGeovis kit.  Needed for performing geographic visualization.
-VTK_USE_GEOVIS:BOOL=ON
-
-//Build VTK with gl2ps support.
-VTK_USE_GL2PS:BOOL=ON
-
-//Build pixel and vertex shader support for GLSL.
-VTK_USE_GLSL_SHADERS:BOOL=ON
-
-//Build VTK with GUI Support
-VTK_USE_GUISUPPORT:BOOL=OFF
-
-//Build the vtkInfovis kit.  Needed for performing information
-// visualization.
-VTK_USE_INFOVIS:BOOL=ON
-
-//Use mangled Mesa with OpenGL.
-VTK_USE_MANGLED_MESA:BOOL=OFF
-
-//Build metaio
-VTK_USE_METAIO:BOOL=ON
-
-//Enable use of the patented mpeg2 library. You are solely responsible
-// for any legal issues associated with using patented code in
-// your software.
-VTK_USE_MPEG2_ENCODER:BOOL=OFF
-
-//Build the MySQL driver for vtkSQLDatabase.
-VTK_USE_MYSQL:BOOL=OFF
-
-//Add support for arbitrary-dimension sparse and dense arrays.
-VTK_USE_N_WAY_ARRAYS:BOOL=OFF
-
-//Build the ODBC database interface
-VTK_USE_ODBC:BOOL=OFF
-
-//Build the vtkParallel kit.
-VTK_USE_PARALLEL:BOOL=OFF
-
-//Build the PostgreSQL driver for vtkSQLDatabase.
-VTK_USE_POSTGRES:BOOL=OFF
-
-//Build the vtkRendering kit.  Needed for displaying data or using
-// widgets.
-VTK_USE_RENDERING:BOOL=ON
-
-//Build shared libraries with rpath.  This makes it easy to run
-// executables from the build tree when using shared libraries,
-// but removes install support.
-VTK_USE_RPATH:BOOL=ON
-
-//Use the system's expat library.
-VTK_USE_SYSTEM_EXPAT:BOOL=OFF
-
-//Use the system's freetype library.
-VTK_USE_SYSTEM_FREETYPE:BOOL=OFF
-
-//Use the system's jpeg library.
-VTK_USE_SYSTEM_JPEG:BOOL=OFF
-
-//Use the system's proj4 library.
-VTK_USE_SYSTEM_LIBPROJ4:BOOL=OFF
-
-//Use the system's libxml2 library.
-VTK_USE_SYSTEM_LIBXML2:BOOL=OFF
-
-//Use the system's png library.
-VTK_USE_SYSTEM_PNG:BOOL=OFF
-
-//Use the system's tiff library.
-VTK_USE_SYSTEM_TIFF:BOOL=OFF
-
-//Use the system's zlib library.
-VTK_USE_SYSTEM_ZLIB:BOOL=OFF
-
-//Build VTK with Tk support
-VTK_USE_TK:BOOL=ON
-
-//Build the vtkViews kit.  Needed for creating packaged and linked
-// views.
-VTK_USE_VIEWS:BOOL=ON
-
-//Enable support for VolumePro 1000.
-VTK_USE_VOLUMEPRO_1000:BOOL=OFF
-
-//Build classes for the X11 window system.
-VTK_USE_X:BOOL=ON
-
-//Location of the WGL extensions header file (wglext.h).
-VTK_WGLEXT_FILE:FILEPATH=@BUILD_DIR@/VTK/Utilities/ParseOGLExt/headers/wglext.h
-
-//Path to a file.
-VTK_WRAP_HINTS:FILEPATH=@BUILD_DIR@/VTK/Wrapping/hints
-
-//Wrap VTK classes into the Java language.
-VTK_WRAP_JAVA:BOOL=OFF
-
-//Wrap VTK classes into the Python language.
-VTK_WRAP_PYTHON:BOOL=ON
-
-//Wrap VTK classes into the TCL language.
-VTK_WRAP_TCL:BOOL=OFF
-
-//Path to a file.
-X11_ICE_INCLUDE_PATH:PATH=/usr/include
-
-//Path to a library.
-X11_ICE_LIB:FILEPATH=/usr/X11R6/lib/libICE.dylib
-
-//Path to a library.
-X11_SM_LIB:FILEPATH=/usr/X11R6/lib/libSM.dylib
-
-//Path to a file.
-X11_X11_INCLUDE_PATH:PATH=/usr/include
-
-//Path to a library.
-X11_X11_LIB:FILEPATH=/usr/X11R6/lib/libX11.dylib
-
-//Path to a file.
-X11_XShm_INCLUDE_PATH:PATH=/usr/include
-
-//Path to a file.
-X11_XTest_INCLUDE_PATH:PATH=/usr/include
-
-//Path to a library.
-X11_XTest_LIB:FILEPATH=/usr/X11R6/lib/libXtst.dylib
-
-//Path to a file.
-X11_Xaccessrules_INCLUDE_PATH:PATH=/usr/include
-
-//Path to a file.
-X11_Xaccessstr_INCLUDE_PATH:PATH=/usr/include
-
-//Path to a file.
-X11_Xau_INCLUDE_PATH:PATH=/usr/include
-
-//Path to a library.
-X11_Xau_LIB:FILEPATH=/usr/X11R6/lib/libXau.dylib
-
-//Path to a file.
-X11_Xcomposite_INCLUDE_PATH:PATH=/usr/include
-
-//Path to a library.
-X11_Xcomposite_LIB:FILEPATH=/usr/X11R6/lib/libXcomposite.dylib
-
-//Path to a file.
-X11_Xcursor_INCLUDE_PATH:PATH=/usr/include
-
-//Path to a library.
-X11_Xcursor_LIB:FILEPATH=/usr/X11R6/lib/libXcursor.dylib
-
-//Path to a file.
-X11_Xdamage_INCLUDE_PATH:PATH=/usr/include
-
-//Path to a library.
-X11_Xdamage_LIB:FILEPATH=/usr/X11R6/lib/libXdamage.dylib
-
-//Path to a file.
-X11_Xdmcp_INCLUDE_PATH:PATH=/usr/include
-
-//Path to a library.
-X11_Xdmcp_LIB:FILEPATH=/usr/X11R6/lib/libXdmcp.dylib
-
-//Path to a library.
-X11_Xext_LIB:FILEPATH=/usr/X11R6/lib/libXext.dylib
-
-//Path to a file.
-X11_Xfixes_INCLUDE_PATH:PATH=/usr/include
-
-//Path to a library.
-X11_Xfixes_LIB:FILEPATH=/usr/X11R6/lib/libXfixes.dylib
-
-//Path to a file.
-X11_Xft_INCLUDE_PATH:PATH=/usr/include
-
-//Path to a library.
-X11_Xft_LIB:FILEPATH=/usr/X11R6/lib/libXft.dylib
-
-//Path to a file.
-X11_Xinerama_INCLUDE_PATH:PATH=/usr/include
-
-//Path to a library.
-X11_Xinerama_LIB:FILEPATH=/usr/X11R6/lib/libXinerama.dylib
-
-//Path to a file.
-X11_Xinput_INCLUDE_PATH:PATH=/usr/include
-
-//Path to a library.
-X11_Xinput_LIB:FILEPATH=/usr/X11R6/lib/libXi.dylib
-
-//Path to a file.
-X11_Xkb_INCLUDE_PATH:PATH=/usr/include
-
-//Path to a file.
-X11_Xkblib_INCLUDE_PATH:PATH=/usr/include
-
-//Path to a file.
-X11_Xlib_INCLUDE_PATH:PATH=/usr/include
-
-//Path to a file.
-X11_Xpm_INCLUDE_PATH:PATH=/usr/include
-
-//Path to a library.
-X11_Xpm_LIB:FILEPATH=/usr/X11R6/lib/libXpm.dylib
-
-//Path to a file.
-X11_Xrandr_INCLUDE_PATH:PATH=/usr/include
-
-//Path to a library.
-X11_Xrandr_LIB:FILEPATH=/usr/X11R6/lib/libXrandr.dylib
-
-//Path to a file.
-X11_Xrender_INCLUDE_PATH:PATH=/usr/include
-
-//Path to a library.
-X11_Xrender_LIB:FILEPATH=/usr/X11R6/lib/libXrender.dylib
-
-//Path to a file.
-X11_Xscreensaver_INCLUDE_PATH:PATH=/usr/include
-
-//Path to a library.
-X11_Xscreensaver_LIB:FILEPATH=/usr/X11R6/lib/libXss.dylib
-
-//Path to a file.
-X11_Xshape_INCLUDE_PATH:PATH=/usr/include
-
-//Path to a file.
-X11_Xt_INCLUDE_PATH:PATH=/usr/include
-
-//Path to a library.
-X11_Xt_LIB:FILEPATH=/usr/X11R6/lib/libXt.dylib
-
-//Path to a file.
-X11_Xutil_INCLUDE_PATH:PATH=/usr/include
-
-//Path to a file.
-X11_Xv_INCLUDE_PATH:PATH=/usr/include
-
-//Path to a library.
-X11_Xv_LIB:FILEPATH=/usr/X11R6/lib/libXv.dylib
-
-//Path to a library.
-X11_Xxf86misc_LIB:FILEPATH=/usr/X11R6/lib/libXxf86misc.dylib
-
-//Path to a file.
-X11_dpms_INCLUDE_PATH:PATH=/usr/include
-
-//Path to a library.
-X11_fontconfig_LIB:FILEPATH=/usr/X11R6/lib/libfontconfig.dylib
-
-//Path to a file.
-X11_xf86misc_INCLUDE_PATH:PATH=/usr/include
-
-//Path to a file.
-X11_xf86vmode_INCLUDE_PATH:PATH=/usr/include
-
-//Value Computed by CMake
-alglib_BINARY_DIR:STATIC=@BUILD_DIR@/VTK-build/Utilities/vtkalglib
-
-//Value Computed by CMake
-alglib_SOURCE_DIR:STATIC=@BUILD_DIR@/VTK/Utilities/vtkalglib
-
-//Value Computed by CMake
-libproj4_BINARY_DIR:STATIC=@BUILD_DIR@/VTK-build/Utilities/vtklibproj4
-
-//Value Computed by CMake
-libproj4_SOURCE_DIR:STATIC=@BUILD_DIR@/VTK/Utilities/vtklibproj4
-
-//Value Computed by CMake
-verdict_BINARY_DIR:STATIC=@BUILD_DIR@/VTK-build/Utilities/verdict
-
-//Value Computed by CMake
-verdict_SOURCE_DIR:STATIC=@BUILD_DIR@/VTK/Utilities/verdict
-
-//Dependencies for the target
-vtkCommonPythonD_LIB_DEPENDS:STATIC=general;vtkCommon;optimized;@PREFIX_PATH@/lib/libpython@PYVER@.dylib;debug;/Library/Frameworks/python.framework;
-
-//Dependencies for the target
-vtkCommonPython_LIB_DEPENDS:STATIC=general;vtkCommonPythonD;optimized;@PREFIX_PATH@/lib/libpython@PYVER@.dylib;debug;/Library/Frameworks/python.framework;
-
-//Dependencies for the target
-vtkCommon_LIB_DEPENDS:STATIC=general;vtksys;general;-lpthread;general;-lm;
-
-//Dependencies for target
-vtkDICOMParser_LIB_DEPENDS:STATIC=
-
-//Value Computed by CMake
-vtkExodus2_BINARY_DIR:STATIC=@BUILD_DIR@/VTK-build/Utilities/vtkexodus2
-
-//Value Computed by CMake
-vtkExodus2_SOURCE_DIR:STATIC=@BUILD_DIR@/VTK/Utilities/vtkexodus2
-
-//Dependencies for the target
-vtkFilteringPythonD_LIB_DEPENDS:STATIC=general;vtkFiltering;general;vtkCommonPythonD;optimized;@PREFIX_PATH@/lib/libpython@PYVER@.dylib;debug;/Library/Frameworks/python.framework;
-
-//Dependencies for the target
-vtkFilteringPython_LIB_DEPENDS:STATIC=general;vtkFilteringPythonD;optimized;@PREFIX_PATH@/lib/libpython@PYVER@.dylib;debug;/Library/Frameworks/python.framework;
-
-//Dependencies for the target
-vtkFiltering_LIB_DEPENDS:STATIC=general;vtkCommon;
-
-//Dependencies for the target
-vtkGenericFilteringPythonD_LIB_DEPENDS:STATIC=general;vtkGenericFiltering;general;vtkFilteringPythonD;general;vtkGraphicsPythonD;optimized;@PREFIX_PATH@/lib/libpython@PYVER@.dylib;debug;/Library/Frameworks/python.framework;
-
-//Dependencies for the target
-vtkGenericFilteringPython_LIB_DEPENDS:STATIC=general;vtkGenericFilteringPythonD;optimized;@PREFIX_PATH@/lib/libpython@PYVER@.dylib;debug;/Library/Frameworks/python.framework;
-
-//Dependencies for the target
-vtkGenericFiltering_LIB_DEPENDS:STATIC=general;vtkFiltering;general;vtkGraphics;
-
-//Dependencies for the target
-vtkGeovisPythonD_LIB_DEPENDS:STATIC=general;vtkGeovis;general;vtkWidgetsPythonD;general;vtkViewsPythonD;optimized;@PREFIX_PATH@/lib/libpython@PYVER@.dylib;debug;/Library/Frameworks/python.framework;
-
-//Dependencies for the target
-vtkGeovisPython_LIB_DEPENDS:STATIC=general;vtkGeovisPythonD;optimized;@PREFIX_PATH@/lib/libpython@PYVER@.dylib;debug;/Library/Frameworks/python.framework;
-
-//Dependencies for the target
-vtkGeovis_LIB_DEPENDS:STATIC=general;vtkWidgets;general;vtkViews;general;vtkproj4;
-
-//Dependencies for the target
-vtkGraphicsPythonD_LIB_DEPENDS:STATIC=general;vtkGraphics;general;vtkFilteringPythonD;optimized;@PREFIX_PATH@/lib/libpython@PYVER@.dylib;debug;/Library/Frameworks/python.framework;
-
-//Dependencies for the target
-vtkGraphicsPython_LIB_DEPENDS:STATIC=general;vtkGraphicsPythonD;optimized;@PREFIX_PATH@/lib/libpython@PYVER@.dylib;debug;/Library/Frameworks/python.framework;
-
-//Dependencies for the target
-vtkGraphics_LIB_DEPENDS:STATIC=general;vtkFiltering;general;vtkverdict;
-
-//Dependencies for the target
-vtkHybridPythonD_LIB_DEPENDS:STATIC=general;vtkHybrid;general;vtkRenderingPythonD;general;vtkIOPythonD;optimized;@PREFIX_PATH@/lib/libpython@PYVER@.dylib;debug;/Library/Frameworks/python.framework;
-
-//Dependencies for the target
-vtkHybridPython_LIB_DEPENDS:STATIC=general;vtkHybridPythonD;optimized;@PREFIX_PATH@/lib/libpython@PYVER@.dylib;debug;/Library/Frameworks/python.framework;
-
-//Dependencies for the target
-vtkHybrid_LIB_DEPENDS:STATIC=general;vtkRendering;general;vtkIO;general;vtkexoIIc;
-
-//Dependencies for the target
-vtkIOPythonD_LIB_DEPENDS:STATIC=general;vtkIO;general;vtkFilteringPythonD;optimized;@PREFIX_PATH@/lib/libpython@PYVER@.dylib;debug;/Library/Frameworks/python.framework;
-
-//Dependencies for the target
-vtkIOPython_LIB_DEPENDS:STATIC=general;vtkIOPythonD;optimized;@PREFIX_PATH@/lib/libpython@PYVER@.dylib;debug;/Library/Frameworks/python.framework;
-
-//Dependencies for the target
-vtkIO_LIB_DEPENDS:STATIC=general;vtkFiltering;general;vtkDICOMParser;general;vtkNetCDF;general;vtkmetaio;general;vtksqlite;general;vtkpng;general;vtkzlib;general;vtkjpeg;general;vtktiff;general;vtkexpat;general;vtksys;
-
-//Dependencies for the target
-vtkImagingPythonD_LIB_DEPENDS:STATIC=general;vtkImaging;general;vtkFilteringPythonD;optimized;@PREFIX_PATH@/lib/libpython@PYVER@.dylib;debug;/Library/Frameworks/python.framework;
-
-//Dependencies for the target
-vtkImagingPython_LIB_DEPENDS:STATIC=general;vtkImagingPythonD;optimized;@PREFIX_PATH@/lib/libpython@PYVER@.dylib;debug;/Library/Frameworks/python.framework;
-
-//Dependencies for the target
-vtkImaging_LIB_DEPENDS:STATIC=general;vtkFiltering;
-
-//Dependencies for the target
-vtkInfovisPythonD_LIB_DEPENDS:STATIC=general;vtkInfovis;general;vtkWidgetsPythonD;optimized;@PREFIX_PATH@/lib/libpython@PYVER@.dylib;debug;/Library/Frameworks/python.framework;
-
-//Dependencies for the target
-vtkInfovisPython_LIB_DEPENDS:STATIC=general;vtkInfovisPythonD;optimized;@PREFIX_PATH@/lib/libpython@PYVER@.dylib;debug;/Library/Frameworks/python.framework;
-
-//Dependencies for the target
-vtkInfovis_LIB_DEPENDS:STATIC=general;vtkWidgets;general;vtklibxml2;general;vtkalglib;
-
-//Dependencies for target
-vtkNetCDF_LIB_DEPENDS:STATIC=
-
-//Dependencies for the target
-vtkRenderingPythonD_LIB_DEPENDS:STATIC=general;vtkRendering;general;vtkGraphicsPythonD;general;vtkImagingPythonD;optimized;@PREFIX_PATH@/lib/libpython@PYVER@.dylib;debug;/Library/Frameworks/python.framework;
-
-//Dependencies for the target
-vtkRenderingPythonTkWidgets_LIB_DEPENDS:STATIC=general;vtkRendering;general;@EXTERNALS@/lib/libtk@TCLTK_VERSION@.dylib;general;@EXTERNALS@/lib/libtcl@TCLTK_VERSION@.dylib;general;m;
-
-//Dependencies for the target
-vtkRenderingPython_LIB_DEPENDS:STATIC=general;vtkRenderingPythonD;optimized;@PREFIX_PATH@/lib/libpython@PYVER@.dylib;debug;/Library/Frameworks/python.framework;
-
-//Dependencies for the target
-vtkRendering_LIB_DEPENDS:STATIC=general;vtkGraphics;general;vtkImaging;general;vtkIO;general;vtkftgl;general;vtkfreetype;general;vtkzlib;general;vtkpng;general;/usr/X11R6/lib/libXt.dylib;general;/usr/X11R6/lib/libSM.dylib;general;/usr/X11R6/lib/libICE.dylib;general;/usr/X11R6/lib/libX11.dylib;general;/usr/X11R6/lib/libXext.dylib;general;/usr/X11R6/lib/libXss.dylib;general;/usr/X11R6/lib/libXft.dylib;general;/usr/X11R6/lib/libfontconfig.dylib;
-
-//Dependencies for the target
-vtkViewsPythonD_LIB_DEPENDS:STATIC=general;vtkViews;general;vtkInfovisPythonD;optimized;@PREFIX_PATH@/lib/libpython@PYVER@.dylib;debug;/Library/Frameworks/python.framework;
-
-//Dependencies for the target
-vtkViewsPython_LIB_DEPENDS:STATIC=general;vtkViewsPythonD;optimized;@PREFIX_PATH@/lib/libpython@PYVER@.dylib;debug;/Library/Frameworks/python.framework;
-
-//Dependencies for the target
-vtkViews_LIB_DEPENDS:STATIC=general;vtkInfovis;
-
-//Dependencies for the target
-vtkVolumeRenderingPythonD_LIB_DEPENDS:STATIC=general;vtkVolumeRendering;general;vtkRenderingPythonD;general;vtkIOPythonD;optimized;@PREFIX_PATH@/lib/libpython@PYVER@.dylib;debug;/Library/Frameworks/python.framework;
-
-//Dependencies for the target
-vtkVolumeRenderingPython_LIB_DEPENDS:STATIC=general;vtkVolumeRenderingPythonD;optimized;@PREFIX_PATH@/lib/libpython@PYVER@.dylib;debug;/Library/Frameworks/python.framework;
-
-//Dependencies for the target
-vtkVolumeRendering_LIB_DEPENDS:STATIC=general;vtkRendering;general;vtkIO;
-
-//Dependencies for the target
-vtkWidgetsPythonD_LIB_DEPENDS:STATIC=general;vtkWidgets;general;vtkRenderingPythonD;general;vtkHybridPythonD;optimized;@PREFIX_PATH@/lib/libpython@PYVER@.dylib;debug;/Library/Frameworks/python.framework;
-
-//Dependencies for the target
-vtkWidgetsPython_LIB_DEPENDS:STATIC=general;vtkWidgetsPythonD;optimized;@PREFIX_PATH@/lib/libpython@PYVER@.dylib;debug;/Library/Frameworks/python.framework;
-
-//Dependencies for the target
-vtkWidgets_LIB_DEPENDS:STATIC=general;vtkRendering;general;vtkHybrid;
-
-//Dependencies for target
-vtkalglib_LIB_DEPENDS:STATIC=
-
-//Dependencies for the target
-vtkexoIIc_LIB_DEPENDS:STATIC=general;vtkNetCDF;
-
-//Dependencies for target
-vtkexpat_LIB_DEPENDS:STATIC=
-
-//Dependencies for target
-vtkfreetype_LIB_DEPENDS:STATIC=
-
-//Dependencies for the target
-vtkftgl_LIB_DEPENDS:STATIC=general;/usr/X11R6/lib/libGL.dylib;general;vtkfreetype;
-
-//Dependencies for target
-vtkjpeg_LIB_DEPENDS:STATIC=
-
-//Value Computed by CMake
-vtklibxml2_BINARY_DIR:STATIC=@BUILD_DIR@/VTK-build/Utilities/vtklibxml2
-
-//Dependencies for the target
-vtklibxml2_LIB_DEPENDS:STATIC=general;vtkzlib;general;dl;general;-lpthread;general;m;
-
-//Value Computed by CMake
-vtklibxml2_SOURCE_DIR:STATIC=@BUILD_DIR@/VTK/Utilities/vtklibxml2
-
-//Value Computed by CMake
-vtkmetaio_BINARY_DIR:STATIC=@BUILD_DIR@/VTK-build/Utilities/vtkmetaio
-
-//Dependencies for the target
-vtkmetaio_LIB_DEPENDS:STATIC=general;vtkzlib;general;vtksys;
-
-//Value Computed by CMake
-vtkmetaio_SOURCE_DIR:STATIC=@BUILD_DIR@/VTK/Utilities/vtkmetaio
-
-//Dependencies for the target
-vtkpng_LIB_DEPENDS:STATIC=general;vtkzlib;
-
-//Dependencies for the target
-vtkproj4_LIB_DEPENDS:STATIC=general;m;
-
-//Dependencies for target
-vtksqlite_LIB_DEPENDS:STATIC=
-
-//Value Computed by CMake
-vtksys_BINARY_DIR:STATIC=@BUILD_DIR@/VTK-build/Utilities/kwsys
-
-//Dependencies for target
-vtksys_LIB_DEPENDS:STATIC=
-
-//Value Computed by CMake
-vtksys_SOURCE_DIR:STATIC=@BUILD_DIR@/VTK/Utilities/kwsys
-
-//Dependencies for the target
-vtktiff_LIB_DEPENDS:STATIC=general;vtkzlib;general;vtkjpeg;
-
-//Dependencies for target
-vtkverdict_LIB_DEPENDS:STATIC=
-
-//Dependencies for target
-vtkzlib_LIB_DEPENDS:STATIC=
-
-
-########################
-# INTERNAL cache entries
-########################
-
-ALGLIB_SHARED_LIB:INTERNAL=ON
-//ADVANCED property for variable: BUILD_DOCUMENTATION
-BUILD_DOCUMENTATION-ADVANCED:INTERNAL=1
-//MODIFIED property for variable: BUILD_EXAMPLES
-BUILD_EXAMPLES-MODIFIED:INTERNAL=ON
-//MODIFIED property for variable: BUILD_SHARED_LIBS
-BUILD_SHARED_LIBS-MODIFIED:INTERNAL=ON
-//MODIFIED property for variable: BUILD_TESTING
-BUILD_TESTING-MODIFIED:INTERNAL=ON
-//ADVANCED property for variable: BZRCOMMAND
-BZRCOMMAND-ADVANCED:INTERNAL=1
-//Result of TRY_COMPILE
-CMAKE_ANSI_FOR_SCOPE:INTERNAL=TRUE
-//Have include iostream
-CMAKE_ANSI_STREAM_HEADERS:INTERNAL=1
-//ADVANCED property for variable: CMAKE_AR
-CMAKE_AR-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: CMAKE_BUILD_TOOL
-CMAKE_BUILD_TOOL-ADVANCED:INTERNAL=1
-//What is the target build tool cmake is generating for.
-CMAKE_BUILD_TOOL:INTERNAL=/usr/bin/make
-//This is the directory where this CMakeCache.txt was created
-CMAKE_CACHEFILE_DIR:INTERNAL=@BUILD_DIR@/VTK-build
-//Major version of cmake used to create the current loaded cache
-CMAKE_CACHE_MAJOR_VERSION:INTERNAL=2
-//Minor version of cmake used to create the current loaded cache
-CMAKE_CACHE_MINOR_VERSION:INTERNAL=8
-//Patch version of cmake used to create the current loaded cache
-CMAKE_CACHE_PATCH_VERSION:INTERNAL=0
-//ADVANCED property for variable: CMAKE_COLOR_MAKEFILE
-CMAKE_COLOR_MAKEFILE-ADVANCED:INTERNAL=1
-//Path to CMake executable.
-CMAKE_COMMAND:INTERNAL=cmake
-//Path to cpack program executable.
-CMAKE_CPACK_COMMAND:INTERNAL=cpack
-//Path to ctest program executable.
-CMAKE_CTEST_COMMAND:INTERNAL=ctest
-//ADVANCED property for variable: CMAKE_CXX_COMPILER
-CMAKE_CXX_COMPILER-ADVANCED:INTERNAL=1
-CMAKE_CXX_COMPILER_WORKS:INTERNAL=1
-//ADVANCED property for variable: CMAKE_CXX_FLAGS
-CMAKE_CXX_FLAGS-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: CMAKE_CXX_FLAGS_DEBUG
-CMAKE_CXX_FLAGS_DEBUG-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: CMAKE_CXX_FLAGS_MINSIZEREL
-CMAKE_CXX_FLAGS_MINSIZEREL-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: CMAKE_CXX_FLAGS_RELEASE
-CMAKE_CXX_FLAGS_RELEASE-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: CMAKE_CXX_FLAGS_RELWITHDEBINFO
-CMAKE_CXX_FLAGS_RELWITHDEBINFO-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: CMAKE_C_COMPILER
-CMAKE_C_COMPILER-ADVANCED:INTERNAL=1
-CMAKE_C_COMPILER_WORKS:INTERNAL=1
-//ADVANCED property for variable: CMAKE_C_FLAGS
-CMAKE_C_FLAGS-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: CMAKE_C_FLAGS_DEBUG
-CMAKE_C_FLAGS_DEBUG-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: CMAKE_C_FLAGS_MINSIZEREL
-CMAKE_C_FLAGS_MINSIZEREL-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: CMAKE_C_FLAGS_RELEASE
-CMAKE_C_FLAGS_RELEASE-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: CMAKE_C_FLAGS_RELWITHDEBINFO
-CMAKE_C_FLAGS_RELWITHDEBINFO-ADVANCED:INTERNAL=1
-//Result of TRY_COMPILE
-CMAKE_DETERMINE_CXX_ABI_COMPILED:INTERNAL=TRUE
-//Result of TRY_COMPILE
-CMAKE_DETERMINE_C_ABI_COMPILED:INTERNAL=TRUE
-//Path to cache edit program executable.
-CMAKE_EDIT_COMMAND:INTERNAL=ccmake
-//Executable file format
-CMAKE_EXECUTABLE_FORMAT:INTERNAL=Unknown
-//ADVANCED property for variable: CMAKE_EXE_LINKER_FLAGS
-CMAKE_EXE_LINKER_FLAGS-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: CMAKE_EXE_LINKER_FLAGS_DEBUG
-CMAKE_EXE_LINKER_FLAGS_DEBUG-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: CMAKE_EXE_LINKER_FLAGS_MINSIZEREL
-CMAKE_EXE_LINKER_FLAGS_MINSIZEREL-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: CMAKE_EXE_LINKER_FLAGS_RELEASE
-CMAKE_EXE_LINKER_FLAGS_RELEASE-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: CMAKE_EXE_LINKER_FLAGS_RELWITHDEBINFO
-CMAKE_EXE_LINKER_FLAGS_RELWITHDEBINFO-ADVANCED:INTERNAL=1
-//Name of generator.
-CMAKE_GENERATOR:INTERNAL=Unix Makefiles
-//Result of TRY_COMPILE
-CMAKE_HAS_ANSI_STRING_STREAM:INTERNAL=TRUE
-//Is X11 around.
-CMAKE_HAS_X:INTERNAL=1
-//Have function connect
-CMAKE_HAVE_CONNECT:INTERNAL=1
-//Have function gethostbyname
-CMAKE_HAVE_GETHOSTBYNAME:INTERNAL=1
-//Have include CMAKE_HAVE_LIMITS_H
-CMAKE_HAVE_LIMITS_H:INTERNAL=1
-//Have library pthreads
-CMAKE_HAVE_PTHREADS_CREATE:INTERNAL=
-//Have library pthread
-CMAKE_HAVE_PTHREAD_CREATE:INTERNAL=1
-//Have include CMAKE_HAVE_PTHREAD_H
-CMAKE_HAVE_PTHREAD_H:INTERNAL=1
-//Have function remove
-CMAKE_HAVE_REMOVE:INTERNAL=1
-//Have function shmat
-CMAKE_HAVE_SHMAT:INTERNAL=1
-//Have includes CMAKE_HAVE_SYS_PRCTL_H
-CMAKE_HAVE_SYS_PRCTL_H:INTERNAL=
-//Have include CMAKE_HAVE_UNISTD_H
-CMAKE_HAVE_UNISTD_H:INTERNAL=1
-//Start directory with the top level CMakeLists.txt file for this
-// project
-CMAKE_HOME_DIRECTORY:INTERNAL=@BUILD_DIR@/VTK
-//ADVANCED property for variable: CMAKE_HP_PTHREADS
-CMAKE_HP_PTHREADS-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: CMAKE_INSTALL_NAME_TOOL
-CMAKE_INSTALL_NAME_TOOL-ADVANCED:INTERNAL=1
-//MODIFIED property for variable: CMAKE_INSTALL_PREFIX
-CMAKE_INSTALL_PREFIX-MODIFIED:INTERNAL=ON
-//Have library ICE
-CMAKE_LIB_ICE_HAS_ICECONNECTIONNUMBER:INTERNAL=1
-//ADVANCED property for variable: CMAKE_LINKER
-CMAKE_LINKER-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: CMAKE_MAKE_PROGRAM
-CMAKE_MAKE_PROGRAM-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: CMAKE_MODULE_LINKER_FLAGS
-CMAKE_MODULE_LINKER_FLAGS-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: CMAKE_MODULE_LINKER_FLAGS_DEBUG
-CMAKE_MODULE_LINKER_FLAGS_DEBUG-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: CMAKE_MODULE_LINKER_FLAGS_MINSIZEREL
-CMAKE_MODULE_LINKER_FLAGS_MINSIZEREL-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: CMAKE_MODULE_LINKER_FLAGS_RELEASE
-CMAKE_MODULE_LINKER_FLAGS_RELEASE-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: CMAKE_MODULE_LINKER_FLAGS_RELWITHDEBINFO
-CMAKE_MODULE_LINKER_FLAGS_RELWITHDEBINFO-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: CMAKE_NM
-CMAKE_NM-ADVANCED:INTERNAL=1
-//Does the compiler support ansi for scope.
-CMAKE_NO_ANSI_FOR_SCOPE:INTERNAL=0
-//ADVANCED property for variable: CMAKE_NO_ANSI_STREAM_HEADERS
-CMAKE_NO_ANSI_STREAM_HEADERS-ADVANCED:INTERNAL=1
-//Does the compiler support headers like iostream.
-CMAKE_NO_ANSI_STREAM_HEADERS:INTERNAL=0
-//Does the compiler support sstream
-CMAKE_NO_ANSI_STRING_STREAM:INTERNAL=0
-//Does the compiler support std::.
-CMAKE_NO_STD_NAMESPACE:INTERNAL=0
-//number of local generators
-CMAKE_NUMBER_OF_LOCAL_GENERATORS:INTERNAL=44
-//ADVANCED property for variable: CMAKE_OBJCOPY
-CMAKE_OBJCOPY-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: CMAKE_OBJDUMP
-CMAKE_OBJDUMP-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: CMAKE_RANLIB
-CMAKE_RANLIB-ADVANCED:INTERNAL=1
-//Test Support for 64 bit file systems
-CMAKE_REQUIRE_LARGE_FILE_SUPPORT:INTERNAL=1
-//Path to CMake installation.
-CMAKE_ROOT:INTERNAL=@EXTERNALS@/share/cmake-2.8
-//ADVANCED property for variable: CMAKE_SHARED_LINKER_FLAGS
-CMAKE_SHARED_LINKER_FLAGS-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: CMAKE_SHARED_LINKER_FLAGS_DEBUG
-CMAKE_SHARED_LINKER_FLAGS_DEBUG-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: CMAKE_SHARED_LINKER_FLAGS_MINSIZEREL
-CMAKE_SHARED_LINKER_FLAGS_MINSIZEREL-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: CMAKE_SHARED_LINKER_FLAGS_RELEASE
-CMAKE_SHARED_LINKER_FLAGS_RELEASE-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: CMAKE_SHARED_LINKER_FLAGS_RELWITHDEBINFO
-CMAKE_SHARED_LINKER_FLAGS_RELWITHDEBINFO-ADVANCED:INTERNAL=1
-//Result of CHECK_TYPE_SIZE
-CMAKE_SIZEOF_CHAR:INTERNAL=1
-//Result of CHECK_TYPE_SIZE
-CMAKE_SIZEOF_DOUBLE:INTERNAL=8
-//Result of CHECK_TYPE_SIZE
-CMAKE_SIZEOF_FLOAT:INTERNAL=4
-//Result of CHECK_TYPE_SIZE
-CMAKE_SIZEOF_INT:INTERNAL=4
-//Result of CHECK_TYPE_SIZE
-CMAKE_SIZEOF_LONG:INTERNAL=4
-//Result of CHECK_TYPE_SIZE
-CMAKE_SIZEOF_SHORT:INTERNAL=2
-//Result of CHECK_TYPE_SIZE
-CMAKE_SIZEOF_UNSIGNED_SHORT:INTERNAL=2
-//Result of CHECK_TYPE_SIZE
-CMAKE_SIZEOF_VOID_P:INTERNAL=4
-//ADVANCED property for variable: CMAKE_SKIP_RPATH
-CMAKE_SKIP_RPATH-ADVANCED:INTERNAL=1
-//Whether to build with rpath.
-CMAKE_SKIP_RPATH:INTERNAL=0
-//Result of TRY_COMPILE
-CMAKE_STD_NAMESPACE:INTERNAL=TRUE
-//ADVANCED property for variable: CMAKE_STRIP
-CMAKE_STRIP-ADVANCED:INTERNAL=1
-//Suppress Warnings that are meant for the author of the CMakeLists.txt
-// files.
-CMAKE_SUPPRESS_DEVELOPER_WARNINGS:INTERNAL=TRUE
-//ADVANCED property for variable: CMAKE_THREAD_LIBS
-CMAKE_THREAD_LIBS-ADVANCED:INTERNAL=1
-//uname command
-CMAKE_UNAME:INTERNAL=/usr/bin/uname
-//ADVANCED property for variable: CMAKE_USE_PTHREADS
-CMAKE_USE_PTHREADS-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: CMAKE_USE_RELATIVE_PATHS
-CMAKE_USE_RELATIVE_PATHS-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: CMAKE_USE_SPROC
-CMAKE_USE_SPROC-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: CMAKE_USE_WIN32_THREADS
-CMAKE_USE_WIN32_THREADS-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: CMAKE_VERBOSE_MAKEFILE
-CMAKE_VERBOSE_MAKEFILE-ADVANCED:INTERNAL=1
-//Result of TEST_BIG_ENDIAN
-CMAKE_WORDS_BIGENDIAN:INTERNAL=0
-//ADVANCED property for variable: CMAKE_X_CFLAGS
-CMAKE_X_CFLAGS-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: CMAKE_X_LIBS
-CMAKE_X_LIBS-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: COVERAGE_COMMAND
-COVERAGE_COMMAND-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: CVSCOMMAND
-CVSCOMMAND-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: CVS_UPDATE_OPTIONS
-CVS_UPDATE_OPTIONS-ADVANCED:INTERNAL=1
-//CXX compiler accepts flag -no-cpp-precomp
-CXX_HAS_CPP_PRECOMP_FLAG:INTERNAL=TRUE
-//ADVANCED property for variable: DART_TESTING_TIMEOUT
-DART_TESTING_TIMEOUT-ADVANCED:INTERNAL=1
-//Single output directory for building all executables.
-EXECUTABLE_OUTPUT_PATH:INTERNAL=@BUILD_DIR@/VTK-build/bin
-//Have include malloc.h
-EX_HAVE_MALLOC_H:INTERNAL=
-//Details about finding PythonInterp
-FIND_PACKAGE_MESSAGE_DETAILS_PythonInterp:INTERNAL=[@PREFIX_PATH@/bin/python@PYVER@]
-//Details about finding PythonLibs
-FIND_PACKAGE_MESSAGE_DETAILS_PythonLibs:INTERNAL=[@PREFIX_PATH@/lib/libpython@PYVER@.dylib][@PREFIX_PATH@/include/python@PYVER@]
-//Details about finding TCL
-FIND_PACKAGE_MESSAGE_DETAILS_TCL:INTERNAL=[@EXTERNALS@/lib/libtcl@TCLTK_VERSION@.dylib][@EXTERNALS@/include]
-//Details about finding TCLTK
-FIND_PACKAGE_MESSAGE_DETAILS_TCLTK:INTERNAL=[@EXTERNALS@/lib/libtcl@TCLTK_VERSION@.dylib][@EXTERNALS@/include][@EXTERNALS@/lib/libtk@TCLTK_VERSION@.dylib][@EXTERNALS@/include]
-//Details about finding TK
-FIND_PACKAGE_MESSAGE_DETAILS_TK:INTERNAL=[@EXTERNALS@/lib/libtk@TCLTK_VERSION@.dylib][@EXTERNALS@/include]
-//Details about finding Tclsh
-FIND_PACKAGE_MESSAGE_DETAILS_Tclsh:INTERNAL=[@EXTERNALS@/bin/tclsh@TCLTK_VERSION@]
-//Details about finding Threads
-FIND_PACKAGE_MESSAGE_DETAILS_Threads:INTERNAL=[TRUE]
-//Details about finding X11
-FIND_PACKAGE_MESSAGE_DETAILS_X11:INTERNAL=[/usr/X11R6/lib/libX11.dylib][/usr/include;/usr/include;/usr/include;/usr/include;/usr/include;/usr/include;/usr/include;/usr/include;/usr/include;/usr/include;/usr/include;/usr/include;/usr/include;/usr/include;/usr/include;/usr/include;/usr/include;/usr/include;/usr/include;/usr/include;/usr/include;/usr/include;/usr/include;/usr/include]
-//Have symbol alloca
-HAVE_ALLOCA:INTERNAL=1
-//Have include HAVE_ALLOCA_H
-HAVE_ALLOCA_H:INTERNAL=1
-//Have includes HAVE_ANSIDECL_H
-HAVE_ANSIDECL_H:INTERNAL=
-//Have include HAVE_ARPA_INET_H
-HAVE_ARPA_INET_H:INTERNAL=1
-//Have include HAVE_ARPA_NAMESER_H
-HAVE_ARPA_NAMESER_H:INTERNAL=1
-//Have include HAVE_ASSERT_H
-HAVE_ASSERT_H:INTERNAL=1
-//Result of TRY_COMPILE
-HAVE_CMAKE_REQUIRE_LARGE_FILE_SUPPORT:INTERNAL=TRUE
-//Result of TRY_COMPILE
-HAVE_CMAKE_SIZEOF_CHAR:INTERNAL=TRUE
-//Result of TRY_COMPILE
-HAVE_CMAKE_SIZEOF_DOUBLE:INTERNAL=TRUE
-//Result of TRY_COMPILE
-HAVE_CMAKE_SIZEOF_FLOAT:INTERNAL=TRUE
-//Result of TRY_COMPILE
-HAVE_CMAKE_SIZEOF_INT:INTERNAL=TRUE
-//Result of TRY_COMPILE
-HAVE_CMAKE_SIZEOF_LONG:INTERNAL=TRUE
-//Result of TRY_COMPILE
-HAVE_CMAKE_SIZEOF_SHORT:INTERNAL=TRUE
-//Result of TRY_COMPILE
-HAVE_CMAKE_SIZEOF_UNSIGNED_SHORT:INTERNAL=TRUE
-//Result of TRY_COMPILE
-HAVE_CMAKE_SIZEOF_VOID_P:INTERNAL=TRUE
-//Result of TRY_COMPILE
-HAVE_CMAKE_WORDS_BIGENDIAN:INTERNAL=TRUE
-//Have include HAVE_CTYPE_H
-HAVE_CTYPE_H:INTERNAL=1
-//Have include HAVE_DIRENT_H
-HAVE_DIRENT_H:INTERNAL=1
-//Have include HAVE_DLFCN_H
-HAVE_DLFCN_H:INTERNAL=1
-//Have library dl;-lpthread;m
-HAVE_DLOPEN:INTERNAL=1
-//Have includes HAVE_DL_H
-HAVE_DL_H:INTERNAL=
-//Have include HAVE_ERRNO_H
-HAVE_ERRNO_H:INTERNAL=1
-//Have include HAVE_FCNTL_H
-HAVE_FCNTL_H:INTERNAL=1
-//Have symbol finite
-HAVE_FINITE:INTERNAL=1
-//Have include HAVE_FLOAT_H
-HAVE_FLOAT_H:INTERNAL=1
-//Have function floor
-HAVE_FLOOR:INTERNAL=1
-//Have symbol fpclass
-HAVE_FPCLASS:INTERNAL=
-//Have symbol fprintf
-HAVE_FPRINTF:INTERNAL=1
-//Have symbol fp_class
-HAVE_FP_CLASS:INTERNAL=
-//Have includes HAVE_FP_CLASS_H
-HAVE_FP_CLASS_H:INTERNAL=
-//Have symbol ftime
-HAVE_FTIME:INTERNAL=1
-//NetCDF test 
-HAVE_FTRUNCATE:INTERNAL=1
-//Result of TRY_COMPILE
-HAVE_GETADDRINFO_COMPILED:INTERNAL=TRUE
-//Have function getopt
-HAVE_GETOPT:INTERNAL=1
-//Have symbol gettimeofday
-HAVE_GETTIMEOFDAY:INTERNAL=1
-//Have includes HAVE_IEEEFP_H
-HAVE_IEEEFP_H:INTERNAL=
-//Have include HAVE_INTTYPES_H
-HAVE_INTTYPES_H:INTERNAL=1
-//Have function isascii
-HAVE_ISASCII:INTERNAL=1
-//Result of TRY_COMPILE
-HAVE_KWSYS_SIZEOF_CHAR:INTERNAL=TRUE
-//Result of TRY_COMPILE
-HAVE_KWSYS_SIZEOF_INT:INTERNAL=TRUE
-//Result of TRY_COMPILE
-HAVE_KWSYS_SIZEOF_LONG:INTERNAL=TRUE
-//Result of TRY_COMPILE
-HAVE_KWSYS_SIZEOF_LONG_LONG:INTERNAL=TRUE
-//Result of TRY_COMPILE
-HAVE_KWSYS_SIZEOF_SHORT:INTERNAL=TRUE
-//Result of TRY_COMPILE
-HAVE_KWSYS_SIZEOF___INT64:INTERNAL=FALSE
-//Have library dl;
-HAVE_LIBDL:INTERNAL=1
-//Have include HAVE_LIMITS_H
-HAVE_LIMITS_H:INTERNAL=1
-//Have symbol localtime
-HAVE_LOCALTIME:INTERNAL=1
-//Have includes HAVE_MALLOC_H
-HAVE_MALLOC_H:INTERNAL=
-//Have include HAVE_MATH_H
-HAVE_MATH_H:INTERNAL=1
-//Have function memmove
-HAVE_MEMMOVE:INTERNAL=1
-//Have include HAVE_MEMORY_H
-HAVE_MEMORY_H:INTERNAL=1
-//Have function memset
-HAVE_MEMSET:INTERNAL=1
-//Have function mmap
-HAVE_MMAP:INTERNAL=1
-//Have includes HAVE_NAN_H
-HAVE_NAN_H:INTERNAL=
-//Have includes HAVE_NDIR_H
-HAVE_NDIR_H:INTERNAL=
-//Have include HAVE_NETDB_H
-HAVE_NETDB_H:INTERNAL=1
-//Have include HAVE_NETINET_IN_H
-HAVE_NETINET_IN_H:INTERNAL=1
-//Have function pow
-HAVE_POW:INTERNAL=1
-//Have symbol printf
-HAVE_PRINTF:INTERNAL=1
-//Have include HAVE_PTHREAD_H
-HAVE_PTHREAD_H:INTERNAL=1
-//Have include HAVE_RESOLV_H
-HAVE_RESOLV_H:INTERNAL=1
-//Have library dld;dl
-HAVE_SHLLOAD:INTERNAL=
-//Have symbol signal
-HAVE_SIGNAL:INTERNAL=1
-//Have include HAVE_SIGNAL_H
-HAVE_SIGNAL_H:INTERNAL=1
-//Result of TRY_COMPILE
-HAVE_SIZEOF_DOUBLE:INTERNAL=TRUE
-//Result of TRY_COMPILE
-HAVE_SIZEOF_FLOAT:INTERNAL=TRUE
-//Result of TRY_COMPILE
-HAVE_SIZEOF_INT:INTERNAL=TRUE
-//Result of TRY_COMPILE
-HAVE_SIZEOF_LONG:INTERNAL=TRUE
-//Result of TRY_COMPILE
-HAVE_SIZEOF_OFF_T:INTERNAL=TRUE
-//Result of TRY_COMPILE
-HAVE_SIZEOF_PTRDIFF_T:INTERNAL=TRUE
-//Result of TRY_COMPILE
-HAVE_SIZEOF_SHORT:INTERNAL=TRUE
-//Result of TRY_COMPILE
-HAVE_SIZEOF_SIZE_T:INTERNAL=TRUE
-//Result of TRY_COMPILE
-HAVE_SIZEOF_SSIZE_T:INTERNAL=TRUE
-//Result of TRY_COMPILE
-HAVE_SIZEOF_UCHAR:INTERNAL=FALSE
-//Have symbol snprintf
-HAVE_SNPRINTF:INTERNAL=1
-//Result of TRY_COMPILE
-HAVE_SOCKLEN_T_COMPILED:INTERNAL=TRUE
-//Have symbol sprintf
-HAVE_SPRINTF:INTERNAL=1
-//Have function sqrt
-HAVE_SQRT:INTERNAL=1
-//Have symbol sscanf
-HAVE_SSCANF:INTERNAL=1
-//Have symbol stat
-HAVE_STAT:INTERNAL=1
-//Have include HAVE_STDARG_H
-HAVE_STDARG_H:INTERNAL=1
-//Have include stddef.h
-HAVE_STDDEF_H:INTERNAL=1
-//Have include stdint.h
-HAVE_STDINT_H:INTERNAL=1
-//Have include HAVE_STDIO_H
-HAVE_STDIO_H:INTERNAL=1
-//Have include HAVE_STDLIB_H
-HAVE_STDLIB_H:INTERNAL=1
-//Have function strcasecmp
-HAVE_STRCASECMP:INTERNAL=1
-//Have function strchr
-HAVE_STRCHR:INTERNAL=1
-//Have symbol strdup
-HAVE_STRDUP:INTERNAL=1
-//Have symbol strerror
-HAVE_STRERROR:INTERNAL=1
-//Have symbol strftime
-HAVE_STRFTIME:INTERNAL=1
-//Have include HAVE_STRINGS_H
-HAVE_STRINGS_H:INTERNAL=1
-//Have include HAVE_STRING_H
-HAVE_STRING_H:INTERNAL=1
-//Have symbol strndup
-HAVE_STRNDUP:INTERNAL=
-//Have function strrchr
-HAVE_STRRCHR:INTERNAL=1
-//Have function strstr
-HAVE_STRSTR:INTERNAL=1
-//Have function strtol
-HAVE_STRTOL:INTERNAL=1
-//Have function strtoul
-HAVE_STRTOUL:INTERNAL=
-//NetCDF test 
-HAVE_ST_BLKSIZE:INTERNAL=1
-//Result of TRY_COMPILE
-HAVE_SYS_DIR_H_COMPILED:INTERNAL=TRUE
-//Have include HAVE_SYS_MMAN_H
-HAVE_SYS_MMAN_H:INTERNAL=1
-//Result of TRY_COMPILE
-HAVE_SYS_NDIR_H_COMPILED:INTERNAL=FALSE
-//Have include HAVE_SYS_SELECT_H
-HAVE_SYS_SELECT_H:INTERNAL=1
-//Have include HAVE_SYS_SOCKET_H
-HAVE_SYS_SOCKET_H:INTERNAL=1
-//Have include HAVE_SYS_STAT_H
-HAVE_SYS_STAT_H:INTERNAL=1
-//Have include HAVE_SYS_TIMEB_H
-HAVE_SYS_TIMEB_H:INTERNAL=1
-//Have include HAVE_SYS_TIME_H
-HAVE_SYS_TIME_H:INTERNAL=1
-//Have include sys/types.h
-HAVE_SYS_TYPES_H:INTERNAL=1
-//Have include HAVE_TIME_H
-HAVE_TIME_H:INTERNAL=1
-//Have include HAVE_UNISTD_H
-HAVE_UNISTD_H:INTERNAL=1
-//Result of TRY_COMPILE
-HAVE_VA_COPY_COMPILED:INTERNAL=TRUE
-//Have symbol vfprintf
-HAVE_VFPRINTF:INTERNAL=1
-//Have symbol vsnprintf
-HAVE_VSNPRINTF:INTERNAL=1
-//Have symbol vsprintf
-HAVE_VSPRINTF:INTERNAL=1
-//Result of TRY_COMPILE
-HAVE_VTK_SIZEOF_LONG_LONG:INTERNAL=TRUE
-//Result of TRY_COMPILE
-HAVE_VTK_SIZEOF___INT64:INTERNAL=FALSE
-//Have includes HAVE_WINDOWS_H
-HAVE_WINDOWS_H:INTERNAL=
-//Have symbol _stat
-HAVE__STAT:INTERNAL=
-//Result of TRY_COMPILE
-HAVE___VA_COPY_COMPILED:INTERNAL=TRUE
-//ADVANCED property for variable: HGCOMMAND
-HGCOMMAND-ADVANCED:INTERNAL=1
-//Result of TRY_RUN
-KWSYS_CHAR_IS_SIGNED:INTERNAL=0
-//Result of TRY_COMPILE
-KWSYS_CHAR_IS_SIGNED_COMPILED:INTERNAL=TRUE
-//Result of TRY_COMPILE
-KWSYS_CXX_HAS_ARGUMENT_DEPENDENT_LOOKUP_COMPILED:INTERNAL=TRUE
-//Result of TRY_COMPILE
-KWSYS_CXX_HAS_CSTDDEF_COMPILED:INTERNAL=TRUE
-//Result of TRY_COMPILE
-KWSYS_CXX_HAS_FULL_SPECIALIZATION_COMPILED:INTERNAL=TRUE
-//Result of TRY_COMPILE
-KWSYS_CXX_HAS_MEMBER_TEMPLATES_COMPILED:INTERNAL=TRUE
-//Result of TRY_COMPILE
-KWSYS_CXX_HAS_NULL_TEMPLATE_ARGS_COMPILED:INTERNAL=FALSE
-//Result of TRY_COMPILE
-KWSYS_C_HAS_PTRDIFF_T_COMPILED:INTERNAL=TRUE
-//Result of TRY_COMPILE
-KWSYS_C_HAS_SSIZE_T_COMPILED:INTERNAL=TRUE
-//Result of TRY_COMPILE
-KWSYS_IOS_HAVE_STD_COMPILED:INTERNAL=TRUE
-//Result of TRY_COMPILE
-KWSYS_IOS_USE_ANSI_COMPILED:INTERNAL=TRUE
-//Result of TRY_COMPILE
-KWSYS_IOS_USE_SSTREAM_COMPILED:INTERNAL=TRUE
-//Result of TRY_RUN
-KWSYS_LFS_WORKS:INTERNAL=0
-//Result of TRY_COMPILE
-KWSYS_LFS_WORKS_COMPILED:INTERNAL=TRUE
-//Result of CHECK_TYPE_SIZE
-KWSYS_SIZEOF_CHAR:INTERNAL=1
-//Result of CHECK_TYPE_SIZE
-KWSYS_SIZEOF_INT:INTERNAL=4
-//Result of CHECK_TYPE_SIZE
-KWSYS_SIZEOF_LONG:INTERNAL=4
-//Result of CHECK_TYPE_SIZE
-KWSYS_SIZEOF_LONG_LONG:INTERNAL=8
-//Result of CHECK_TYPE_SIZE
-KWSYS_SIZEOF_SHORT:INTERNAL=2
-//Result of CHECK_TYPE_SIZE
-KWSYS_SIZEOF___INT64:INTERNAL=
-//Result of TRY_COMPILE
-KWSYS_STAT_HAS_ST_MTIM_COMPILED:INTERNAL=FALSE
-//Result of TRY_COMPILE
-KWSYS_STL_HAS_ALLOCATOR_MAX_SIZE_ARGUMENT_COMPILED:INTERNAL=FALSE
-//Result of TRY_COMPILE
-KWSYS_STL_HAS_ALLOCATOR_OBJECTS_COMPILED:INTERNAL=TRUE
-//Result of TRY_COMPILE
-KWSYS_STL_HAS_ALLOCATOR_REBIND_COMPILED:INTERNAL=TRUE
-//Result of TRY_COMPILE
-KWSYS_STL_HAS_ALLOCATOR_TEMPLATE_COMPILED:INTERNAL=TRUE
-//Result of TRY_COMPILE
-KWSYS_STL_HAS_ITERATOR_TRAITS_COMPILED:INTERNAL=TRUE
-//Result of TRY_COMPILE
-KWSYS_STL_HAVE_STD_COMPILED:INTERNAL=TRUE
-//Result of TRY_COMPILE
-KWSYS_STL_STRING_HAVE_NEQ_CHAR_COMPILED:INTERNAL=TRUE
-//Single output directory for building all libraries.
-LIBRARY_OUTPUT_PATH:INTERNAL=@BUILD_DIR@/VTK-build/bin
-//ADVANCED property for variable: MAKECOMMAND
-MAKECOMMAND-ADVANCED:INTERNAL=1
-//Path to vtkMaterialLibraryMacro.h
-MATERIAL_LIBRARY_MATERIAL_MACRO_HEADER:INTERNAL=@BUILD_DIR@/VTK-build/Utilities/MaterialLibrary/vtkMaterialLibraryMacro.h
-//Path to vtkShaderCodeLibraryMacro.h
-MATERIAL_LIBRARY_SHADER_MACRO_HEADER:INTERNAL=@BUILD_DIR@/VTK-build/Utilities/MaterialLibrary/vtkShaderCodeLibraryMacro.h
-//ADVANCED property for variable: MEMORYCHECK_COMMAND
-MEMORYCHECK_COMMAND-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: MEMORYCHECK_SUPPRESSIONS_FILE
-MEMORYCHECK_SUPPRESSIONS_FILE-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: OPENGL_INCLUDE_DIR
-OPENGL_INCLUDE_DIR-ADVANCED:INTERNAL=1
-//MODIFIED property for variable: OPENGL_INCLUDE_DIR
-OPENGL_INCLUDE_DIR-MODIFIED:INTERNAL=ON
-//ADVANCED property for variable: OPENGL_gl_LIBRARY
-OPENGL_gl_LIBRARY-ADVANCED:INTERNAL=1
-//MODIFIED property for variable: OPENGL_gl_LIBRARY
-OPENGL_gl_LIBRARY-MODIFIED:INTERNAL=ON
-//ADVANCED property for variable: OPENGL_glu_LIBRARY
-OPENGL_glu_LIBRARY-ADVANCED:INTERNAL=1
-//MODIFIED property for variable: OPENGL_glu_LIBRARY
-OPENGL_glu_LIBRARY-MODIFIED:INTERNAL=ON
-//ADVANCED property for variable: OPENGL_xmesa_INCLUDE_DIR
-OPENGL_xmesa_INCLUDE_DIR-ADVANCED:INTERNAL=1
-//Have symbol atanh
-PROJ_HAVE_ATANH:INTERNAL=1
-//Have symbol csin
-PROJ_HAVE_COMPLEX:INTERNAL=1
-//ADVANCED property for variable: PROJ_LIST_EXTERNAL
-PROJ_LIST_EXTERNAL-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: PROJ_USE_GSL
-PROJ_USE_GSL-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: PROJ_USE_PTHREADS
-PROJ_USE_PTHREADS-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: PYTHON_DEBUG_LIBRARY
-PYTHON_DEBUG_LIBRARY-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: PYTHON_ENABLE_MODULE_vtkCommonPython
-PYTHON_ENABLE_MODULE_vtkCommonPython-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: PYTHON_ENABLE_MODULE_vtkFilteringPython
-PYTHON_ENABLE_MODULE_vtkFilteringPython-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: PYTHON_ENABLE_MODULE_vtkGenericFilteringPython
-PYTHON_ENABLE_MODULE_vtkGenericFilteringPython-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: PYTHON_ENABLE_MODULE_vtkGeovisPython
-PYTHON_ENABLE_MODULE_vtkGeovisPython-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: PYTHON_ENABLE_MODULE_vtkGraphicsPython
-PYTHON_ENABLE_MODULE_vtkGraphicsPython-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: PYTHON_ENABLE_MODULE_vtkHybridPython
-PYTHON_ENABLE_MODULE_vtkHybridPython-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: PYTHON_ENABLE_MODULE_vtkIOPython
-PYTHON_ENABLE_MODULE_vtkIOPython-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: PYTHON_ENABLE_MODULE_vtkImagingPython
-PYTHON_ENABLE_MODULE_vtkImagingPython-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: PYTHON_ENABLE_MODULE_vtkInfovisPython
-PYTHON_ENABLE_MODULE_vtkInfovisPython-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: PYTHON_ENABLE_MODULE_vtkRenderingPython
-PYTHON_ENABLE_MODULE_vtkRenderingPython-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: PYTHON_ENABLE_MODULE_vtkViewsPython
-PYTHON_ENABLE_MODULE_vtkViewsPython-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: PYTHON_ENABLE_MODULE_vtkVolumeRenderingPython
-PYTHON_ENABLE_MODULE_vtkVolumeRenderingPython-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: PYTHON_ENABLE_MODULE_vtkWidgetsPython
-PYTHON_ENABLE_MODULE_vtkWidgetsPython-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: PYTHON_EXECUTABLE
-PYTHON_EXECUTABLE-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: PYTHON_EXTRA_LIBS
-PYTHON_EXTRA_LIBS-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: PYTHON_INCLUDE_PATH
-PYTHON_INCLUDE_PATH-ADVANCED:INTERNAL=1
-//MODIFIED property for variable: PYTHON_INCLUDE_PATH
-PYTHON_INCLUDE_PATH-MODIFIED:INTERNAL=ON
-//ADVANCED property for variable: PYTHON_LIBRARY
-PYTHON_LIBRARY-ADVANCED:INTERNAL=1
-//MODIFIED property for variable: PYTHON_LIBRARY
-PYTHON_LIBRARY-MODIFIED:INTERNAL=ON
-//ADVANCED property for variable: PYTHON_MODULE_vtkCommonPython_BUILD_SHARED
-PYTHON_MODULE_vtkCommonPython_BUILD_SHARED-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: PYTHON_MODULE_vtkFilteringPython_BUILD_SHARED
-PYTHON_MODULE_vtkFilteringPython_BUILD_SHARED-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: PYTHON_MODULE_vtkGenericFilteringPython_BUILD_SHARED
-PYTHON_MODULE_vtkGenericFilteringPython_BUILD_SHARED-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: PYTHON_MODULE_vtkGeovisPython_BUILD_SHARED
-PYTHON_MODULE_vtkGeovisPython_BUILD_SHARED-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: PYTHON_MODULE_vtkGraphicsPython_BUILD_SHARED
-PYTHON_MODULE_vtkGraphicsPython_BUILD_SHARED-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: PYTHON_MODULE_vtkHybridPython_BUILD_SHARED
-PYTHON_MODULE_vtkHybridPython_BUILD_SHARED-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: PYTHON_MODULE_vtkIOPython_BUILD_SHARED
-PYTHON_MODULE_vtkIOPython_BUILD_SHARED-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: PYTHON_MODULE_vtkImagingPython_BUILD_SHARED
-PYTHON_MODULE_vtkImagingPython_BUILD_SHARED-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: PYTHON_MODULE_vtkInfovisPython_BUILD_SHARED
-PYTHON_MODULE_vtkInfovisPython_BUILD_SHARED-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: PYTHON_MODULE_vtkRenderingPython_BUILD_SHARED
-PYTHON_MODULE_vtkRenderingPython_BUILD_SHARED-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: PYTHON_MODULE_vtkViewsPython_BUILD_SHARED
-PYTHON_MODULE_vtkViewsPython_BUILD_SHARED-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: PYTHON_MODULE_vtkVolumeRenderingPython_BUILD_SHARED
-PYTHON_MODULE_vtkVolumeRenderingPython_BUILD_SHARED-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: PYTHON_MODULE_vtkWidgetsPython_BUILD_SHARED
-PYTHON_MODULE_vtkWidgetsPython_BUILD_SHARED-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: PYTHON_UTIL_LIBRARY
-PYTHON_UTIL_LIBRARY-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: SCPCOMMAND
-SCPCOMMAND-ADVANCED:INTERNAL=1
-//Result of TRY_COMPILE
-SHARED_LIBRARY_PATH_INFO_COMPILED:INTERNAL=TRUE
-//Result of TRY_RUN
-SHARED_LIBRARY_PATH_TYPE:INTERNAL=0
-//runtime library path variable name.
-SHARED_LIBRARY_PATH_VAR_NAME:INTERNAL=DYLD_LIBRARY_PATH
-//ADVANCED property for variable: SITE
-SITE-ADVANCED:INTERNAL=1
-//Result of CHECK_TYPE_SIZE
-SIZEOF_DOUBLE:INTERNAL=8
-//Result of CHECK_TYPE_SIZE
-SIZEOF_FLOAT:INTERNAL=4
-//Result of CHECK_TYPE_SIZE
-SIZEOF_INT:INTERNAL=4
-//Result of CHECK_TYPE_SIZE
-SIZEOF_LONG:INTERNAL=4
-//Result of CHECK_TYPE_SIZE
-SIZEOF_OFF_T:INTERNAL=8
-//Result of CHECK_TYPE_SIZE
-SIZEOF_PTRDIFF_T:INTERNAL=4
-//Result of CHECK_TYPE_SIZE
-SIZEOF_SHORT:INTERNAL=2
-//Result of CHECK_TYPE_SIZE
-SIZEOF_SIZE_T:INTERNAL=4
-//Result of CHECK_TYPE_SIZE
-SIZEOF_SSIZE_T:INTERNAL=4
-//Result of CHECK_TYPE_SIZE
-SIZEOF_UCHAR:INTERNAL=
-//ADVANCED property for variable: SLURM_SBATCH_COMMAND
-SLURM_SBATCH_COMMAND-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: SLURM_SRUN_COMMAND
-SLURM_SRUN_COMMAND-ADVANCED:INTERNAL=1
-//Have include STDC_HEADERS
-STDC_HEADERS:INTERNAL=1
-//Result of TRY_COMPILE
-SUPPORT_IP6_COMPILED:INTERNAL=TRUE
-//ADVANCED property for variable: SVNCOMMAND
-SVNCOMMAND-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: TCL_INCLUDE_PATH
-TCL_INCLUDE_PATH-ADVANCED:INTERNAL=1
-//MODIFIED property for variable: TCL_INCLUDE_PATH
-TCL_INCLUDE_PATH-MODIFIED:INTERNAL=ON
-//ADVANCED property for variable: TCL_LIBRARY
-TCL_LIBRARY-ADVANCED:INTERNAL=1
-//MODIFIED property for variable: TCL_LIBRARY
-TCL_LIBRARY-MODIFIED:INTERNAL=ON
-//ADVANCED property for variable: TCL_TCLSH
-TCL_TCLSH-ADVANCED:INTERNAL=1
-//MODIFIED property for variable: TCL_TCLSH
-TCL_TCLSH-MODIFIED:INTERNAL=ON
-//ADVANCED property for variable: TK_INCLUDE_PATH
-TK_INCLUDE_PATH-ADVANCED:INTERNAL=1
-//MODIFIED property for variable: TK_INCLUDE_PATH
-TK_INCLUDE_PATH-MODIFIED:INTERNAL=ON
-//ADVANCED property for variable: TK_INTERNAL_PATH
-TK_INTERNAL_PATH-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: TK_LIBRARY
-TK_LIBRARY-ADVANCED:INTERNAL=1
-//MODIFIED property for variable: TK_LIBRARY
-TK_LIBRARY-MODIFIED:INTERNAL=ON
-//ADVANCED property for variable: TK_WISH
-TK_WISH-ADVANCED:INTERNAL=1
-//This value is not used by VTK.
-TK_WISH:INTERNAL=/usr/bin/wish
-//ADVANCED property for variable: VERDICT_BUILD_DOC
-VERDICT_BUILD_DOC-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: VERDICT_ENABLE_TESTING
-VERDICT_ENABLE_TESTING-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: VERDICT_MANGLE
-VERDICT_MANGLE-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: VERDICT_MANGLE_PREFIX
-VERDICT_MANGLE_PREFIX-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: VERDICT_USE_FLOAT
-VERDICT_USE_FLOAT-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: VLI_LIBRARY_FOR_VP1000
-VLI_LIBRARY_FOR_VP1000-ADVANCED:INTERNAL=1
-//Result of TRY_COMPILE
-VTK_ANSI_STREAM_EOF_COMPILED:INTERNAL=TRUE
-//Result of TRY_RUN
-VTK_ANSI_STREAM_EOF_RESULT:INTERNAL=0
-//Support for C++ type bool
-VTK_COMPILER_HAS_BOOL:INTERNAL=1
-//Support for full template specialization syntax
-VTK_COMPILER_HAS_FULL_SPECIALIZATION:INTERNAL=1
-//ADVANCED property for variable: VTK_DEBUG_LEAKS
-VTK_DEBUG_LEAKS-ADVANCED:INTERNAL=1
-//The directory in which code for Shaders is provided.
-VTK_DEFAULT_SHADERS_DIR:INTERNAL=@BUILD_DIR@/VTK-build/Utilities/MaterialLibrary/Repository
-//String encoder.
-VTK_ENCODESTRING_EXE:INTERNAL=@BUILD_DIR@/VTK-build/bin/vtkEncodeString
-//Support for C++ explicit templates
-VTK_EXPLICIT_TEMPLATES:INTERNAL=1
-//ADVANCED property for variable: VTK_GLEXT_FILE
-VTK_GLEXT_FILE-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: VTK_GLXEXT_FILE
-VTK_GLXEXT_FILE-ADVANCED:INTERNAL=1
-//Have include iosfwd
-VTK_HAVE_ANSI_STREAMS:INTERNAL=1
-//Support for getsockname with socklen_t
-VTK_HAVE_GETSOCKNAME_WITH_SOCKLEN_T:INTERNAL=1
-//Have library socket
-VTK_HAVE_LIBSOCKET:INTERNAL=
-//Have include iostream.h
-VTK_HAVE_OLD_STREAMS:INTERNAL=1
-//Have include strstream.h
-VTK_HAVE_OLD_STRSTREAM_H:INTERNAL=
-//Have include strstrea.h
-VTK_HAVE_OLD_STRSTREA_H:INTERNAL=
-//Have symbol SO_REUSEADDR
-VTK_HAVE_SO_REUSEADDR:INTERNAL=1
-//Whether istream supports long long
-VTK_ISTREAM_SUPPORTS_LONG_LONG:INTERNAL=1
-//ADVANCED property for variable: VTK_LEGACY_REMOVE
-VTK_LEGACY_REMOVE-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: VTK_LEGACY_SILENT
-VTK_LEGACY_SILENT-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: VTK_MATERIALS_DIRS
-VTK_MATERIALS_DIRS-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: VTK_NO_PYTHON_THREADS
-VTK_NO_PYTHON_THREADS-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: VTK_OPENGL_HAS_OSMESA
-VTK_OPENGL_HAS_OSMESA-ADVANCED:INTERNAL=1
-//Whether ostream supports long long
-VTK_OSTREAM_SUPPORTS_LONG_LONG:INTERNAL=1
-//OpenGL extensions parser.
-VTK_PARSEOGLEXT_EXE:INTERNAL=@BUILD_DIR@/VTK-build/bin/vtkParseOGLExt
-//Install directory for Python .py and .pyc files
-VTK_PYTHON_MODULE_INSTALL_DIR:INTERNAL=${CMAKE_INSTALL_PREFIX}/lib/python@PYVER@/site-packages
-//Install directory for Python binary modules
-VTK_PYTHON_MODULE_PLATFORM_INSTALL_DIR:INTERNAL=${CMAKE_INSTALL_PREFIX}/lib/python@PYVER@/site-packages
-//ADVANCED property for variable: VTK_PYTHON_SETUP_ARGS
-VTK_PYTHON_SETUP_ARGS-ADVANCED:INTERNAL=1
-//Result of CHECK_TYPE_SIZE
-VTK_SIZEOF_LONG_LONG:INTERNAL=8
-//Result of CHECK_TYPE_SIZE
-VTK_SIZEOF___INT64:INTERNAL=
-//Very few users should worry about this option. If VTK is built
-// against a static Tcl/Tk lib (see VTK_TCL_TK_STATIC) or a shared
-// Tcl/Tk bundled inside a project with no library support files
-// (ex: ParaViewComplete), this variable should be set to ON and
-// both VTK_TCL_SUPPORT_LIBRARY_PATH and VTK_TK_SUPPORT_LIBRARY_PATH
-// should point to the directories that hold those files (typically,
-// lib/tcl8.4 and lib/tk8.4 for a typical Tcl/Tk installation,
-// or tcl8.4.5/library and tk8.4.5/library for a Tcl/Tk source
-// repository). Once this variable is set to ON, support files
-// will automatically be copied to the build directory and the
-// executables will try to use that location to initialize Tcl/Tk.
-VTK_TCL_TK_COPY_SUPPORT_LIBRARY:INTERNAL=
-//ADVANCED property for variable: VTK_TESTING_USE_FPE
-VTK_TESTING_USE_FPE-ADVANCED:INTERNAL=1
-//Whether char is signed.
-VTK_TYPE_CHAR_IS_SIGNED:INTERNAL=1
-//Result of TRY_COMPILE
-VTK_TYPE_CHAR_IS_SIGNED_COMPILED:INTERNAL=TRUE
-//ADVANCED property for variable: VTK_USE_64BIT_IDS
-VTK_USE_64BIT_IDS-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: VTK_USE_BOOST
-VTK_USE_BOOST-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: VTK_USE_CG_SHADERS
-VTK_USE_CG_SHADERS-ADVANCED:INTERNAL=1
-//MODIFIED property for variable: VTK_USE_COCOA
-VTK_USE_COCOA-MODIFIED:INTERNAL=ON
-//ADVANCED property for variable: VTK_USE_DISPLAY
-VTK_USE_DISPLAY-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: VTK_USE_FFMPEG_ENCODER
-VTK_USE_FFMPEG_ENCODER-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: VTK_USE_GL2PS
-VTK_USE_GL2PS-ADVANCED:INTERNAL=1
-//MODIFIED property for variable: VTK_USE_GL2PS
-VTK_USE_GL2PS-MODIFIED:INTERNAL=ON
-//ADVANCED property for variable: VTK_USE_GLSL_SHADERS
-VTK_USE_GLSL_SHADERS-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: VTK_USE_GUISUPPORT
-VTK_USE_GUISUPPORT-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: VTK_USE_MANGLED_MESA
-VTK_USE_MANGLED_MESA-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: VTK_USE_METAIO
-VTK_USE_METAIO-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: VTK_USE_MPEG2_ENCODER
-VTK_USE_MPEG2_ENCODER-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: VTK_USE_MYSQL
-VTK_USE_MYSQL-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: VTK_USE_ODBC
-VTK_USE_ODBC-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: VTK_USE_POSTGRES
-VTK_USE_POSTGRES-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: VTK_USE_RPATH
-VTK_USE_RPATH-ADVANCED:INTERNAL=1
-//MODIFIED property for variable: VTK_USE_RPATH
-VTK_USE_RPATH-MODIFIED:INTERNAL=ON
-//ADVANCED property for variable: VTK_USE_SYSTEM_EXPAT
-VTK_USE_SYSTEM_EXPAT-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: VTK_USE_SYSTEM_FREETYPE
-VTK_USE_SYSTEM_FREETYPE-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: VTK_USE_SYSTEM_JPEG
-VTK_USE_SYSTEM_JPEG-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: VTK_USE_SYSTEM_LIBPROJ4
-VTK_USE_SYSTEM_LIBPROJ4-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: VTK_USE_SYSTEM_LIBXML2
-VTK_USE_SYSTEM_LIBXML2-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: VTK_USE_SYSTEM_PNG
-VTK_USE_SYSTEM_PNG-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: VTK_USE_SYSTEM_TIFF
-VTK_USE_SYSTEM_TIFF-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: VTK_USE_SYSTEM_ZLIB
-VTK_USE_SYSTEM_ZLIB-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: VTK_USE_TK
-VTK_USE_TK-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: VTK_USE_VOLUMEPRO_1000
-VTK_USE_VOLUMEPRO_1000-ADVANCED:INTERNAL=1
-//MODIFIED property for variable: VTK_USE_X
-VTK_USE_X-MODIFIED:INTERNAL=ON
-//ADVANCED property for variable: VTK_WGLEXT_FILE
-VTK_WGLEXT_FILE-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: VTK_WRAP_HINTS
-VTK_WRAP_HINTS-ADVANCED:INTERNAL=1
-//MODIFIED property for variable: VTK_WRAP_PYTHON
-VTK_WRAP_PYTHON-MODIFIED:INTERNAL=ON
-//Location of program to do Python wrapping
-VTK_WRAP_PYTHON_EXE:INTERNAL=@BUILD_DIR@/VTK-build/bin/vtkWrapPython
-//Location of program to do Python wrapping
-VTK_WRAP_PYTHON_INIT_EXE:INTERNAL=@BUILD_DIR@/VTK-build/bin/vtkWrapPythonInit
-//ADVANCED property for variable: X11_ICE_INCLUDE_PATH
-X11_ICE_INCLUDE_PATH-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: X11_ICE_LIB
-X11_ICE_LIB-ADVANCED:INTERNAL=1
-//Have library /usr/X11R6/lib/libX11.dylib;/usr/X11R6/lib/libXext.dylib
-X11_LIB_X11_SOLO:INTERNAL=1
-//ADVANCED property for variable: X11_SM_LIB
-X11_SM_LIB-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: X11_X11_INCLUDE_PATH
-X11_X11_INCLUDE_PATH-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: X11_X11_LIB
-X11_X11_LIB-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: X11_XShm_INCLUDE_PATH
-X11_XShm_INCLUDE_PATH-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: X11_XTest_INCLUDE_PATH
-X11_XTest_INCLUDE_PATH-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: X11_XTest_LIB
-X11_XTest_LIB-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: X11_Xaccessrules_INCLUDE_PATH
-X11_Xaccessrules_INCLUDE_PATH-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: X11_Xaccessstr_INCLUDE_PATH
-X11_Xaccessstr_INCLUDE_PATH-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: X11_Xau_INCLUDE_PATH
-X11_Xau_INCLUDE_PATH-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: X11_Xau_LIB
-X11_Xau_LIB-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: X11_Xcomposite_INCLUDE_PATH
-X11_Xcomposite_INCLUDE_PATH-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: X11_Xcomposite_LIB
-X11_Xcomposite_LIB-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: X11_Xcursor_INCLUDE_PATH
-X11_Xcursor_INCLUDE_PATH-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: X11_Xcursor_LIB
-X11_Xcursor_LIB-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: X11_Xdamage_INCLUDE_PATH
-X11_Xdamage_INCLUDE_PATH-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: X11_Xdamage_LIB
-X11_Xdamage_LIB-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: X11_Xdmcp_INCLUDE_PATH
-X11_Xdmcp_INCLUDE_PATH-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: X11_Xdmcp_LIB
-X11_Xdmcp_LIB-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: X11_Xext_LIB
-X11_Xext_LIB-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: X11_Xfixes_INCLUDE_PATH
-X11_Xfixes_INCLUDE_PATH-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: X11_Xfixes_LIB
-X11_Xfixes_LIB-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: X11_Xft_INCLUDE_PATH
-X11_Xft_INCLUDE_PATH-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: X11_Xft_LIB
-X11_Xft_LIB-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: X11_Xinerama_INCLUDE_PATH
-X11_Xinerama_INCLUDE_PATH-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: X11_Xinerama_LIB
-X11_Xinerama_LIB-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: X11_Xinput_INCLUDE_PATH
-X11_Xinput_INCLUDE_PATH-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: X11_Xinput_LIB
-X11_Xinput_LIB-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: X11_Xkb_INCLUDE_PATH
-X11_Xkb_INCLUDE_PATH-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: X11_Xkblib_INCLUDE_PATH
-X11_Xkblib_INCLUDE_PATH-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: X11_Xlib_INCLUDE_PATH
-X11_Xlib_INCLUDE_PATH-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: X11_Xpm_INCLUDE_PATH
-X11_Xpm_INCLUDE_PATH-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: X11_Xpm_LIB
-X11_Xpm_LIB-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: X11_Xrandr_INCLUDE_PATH
-X11_Xrandr_INCLUDE_PATH-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: X11_Xrandr_LIB
-X11_Xrandr_LIB-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: X11_Xrender_INCLUDE_PATH
-X11_Xrender_INCLUDE_PATH-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: X11_Xrender_LIB
-X11_Xrender_LIB-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: X11_Xscreensaver_INCLUDE_PATH
-X11_Xscreensaver_INCLUDE_PATH-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: X11_Xscreensaver_LIB
-X11_Xscreensaver_LIB-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: X11_Xshape_INCLUDE_PATH
-X11_Xshape_INCLUDE_PATH-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: X11_Xt_INCLUDE_PATH
-X11_Xt_INCLUDE_PATH-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: X11_Xt_LIB
-X11_Xt_LIB-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: X11_Xutil_INCLUDE_PATH
-X11_Xutil_INCLUDE_PATH-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: X11_Xv_INCLUDE_PATH
-X11_Xv_INCLUDE_PATH-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: X11_Xv_LIB
-X11_Xv_LIB-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: X11_Xxf86misc_LIB
-X11_Xxf86misc_LIB-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: X11_dpms_INCLUDE_PATH
-X11_dpms_INCLUDE_PATH-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: X11_fontconfig_LIB
-X11_fontconfig_LIB-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: X11_xf86misc_INCLUDE_PATH
-X11_xf86misc_INCLUDE_PATH-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: X11_xf86vmode_INCLUDE_PATH
-X11_xf86vmode_INCLUDE_PATH-ADVANCED:INTERNAL=1
-//Already complained about update type.
-__CTEST_UPDATE_TYPE_COMPLAINED:INTERNAL=1
-
diff --git a/exsrc/src/CMakeCache.txt.mac.Framework.in b/exsrc/src/CMakeCache.txt.mac.Framework.in
deleted file mode 100644
index 9764e5a524be050fde275b07279bd6ae7ae2b22a..0000000000000000000000000000000000000000
--- a/exsrc/src/CMakeCache.txt.mac.Framework.in
+++ /dev/null
@@ -1,2066 +0,0 @@
-# This is the CMakeCache file.
-# For build in directory: @BUILD_DIR@/VTK-build
-# It was generated by CMake: @EXTERNALS@/bin/cmake
-# You can edit this file to change values found and used by cmake.
-# If you do not want to change any of the values, simply exit the editor.
-# If you do want to change a value, simply edit, save, and exit the editor.
-# The syntax for the file is as follows:
-# KEY:TYPE=VALUE
-# KEY is the name of a variable in the cache.
-# TYPE is a hint to GUIs for the type of VALUE; DO NOT EDIT TYPE!
-# VALUE is the current value for the KEY.
-
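For readers skimming this deleted template: each KEY:TYPE=VALUE line below is an ordinary CMake cache entry, and the @BUILD_DIR@, @EXTERNALS@, @PREFIX_PATH@, @PYVER@ and @TCLTK_VERSION@ tokens are placeholders that the old exsrc packaging scripts presumably substituted (this being a .in file) before handing the cache to CMake. A minimal sketch of how such entries are seeded; the script itself is hypothetical, though the names are taken from the entries that follow:

    # Hypothetical seeding script; each set(... CACHE ...) call produces one
    # KEY:TYPE=VALUE entry of the form described in the header above.
    set(BUILD_TESTING ON CACHE BOOL "Build the testing tree.")
    set(CMAKE_INSTALL_PREFIX "/opt/vtk" CACHE PATH
        "Install path prefix, prepended onto install directories.")
    # Equivalently, seeded from the command line:
    #   cmake -DBUILD_TESTING:BOOL=ON -DCMAKE_INSTALL_PREFIX:PATH=/opt/vtk ../VTK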
-########################
-# EXTERNAL cache entries
-########################
-
-//Path to a library.
-ApplicationServices:FILEPATH=/System/Library/Frameworks/ApplicationServices.framework
-
-//Build the documentation (Doxygen).
-BUILD_DOCUMENTATION:BOOL=OFF
-
-//Build VTK examples.
-BUILD_EXAMPLES:BOOL=OFF
-
-//Build Verdict with shared libraries.
-BUILD_SHARED_LIBS:BOOL=ON
-
-//Build the testing tree.
-BUILD_TESTING:BOOL=ON
-
-//Path to a program.
-BZRCOMMAND:FILEPATH=BZRCOMMAND-NOTFOUND
-
-//Path to a program.
-CMAKE_AR:FILEPATH=/usr/bin/ar
-
-//For backwards compatibility, what version of CMake commands and
-// syntax should this version of CMake try to support.
-CMAKE_BACKWARDS_COMPATIBILITY:STRING=2.4
-
-//Choose the type of build, options are: None(CMAKE_CXX_FLAGS or
-// CMAKE_C_FLAGS used) Debug Release RelWithDebInfo MinSizeRel.
-CMAKE_BUILD_TYPE:STRING=
-
-//Enable/Disable color output during build.
-CMAKE_COLOR_MAKEFILE:BOOL=ON
-
-//CXX compiler.
-CMAKE_CXX_COMPILER:FILEPATH=/usr/bin/c++
-
-//Flags used by the compiler during all build types.
-CMAKE_CXX_FLAGS:STRING=
-
-//Flags used by the compiler during debug builds.
-CMAKE_CXX_FLAGS_DEBUG:STRING=-g
-
-//Flags used by the compiler during release minsize builds.
-CMAKE_CXX_FLAGS_MINSIZEREL:STRING=-Os -DNDEBUG
-
-//Flags used by the compiler during release builds (/MD /Ob1 /Oi
-// /Ot /Oy /Gs will produce slightly less optimized but smaller
-// files).
-CMAKE_CXX_FLAGS_RELEASE:STRING=-O3 -DNDEBUG
-
-//Flags used by the compiler during Release with Debug Info builds.
-CMAKE_CXX_FLAGS_RELWITHDEBINFO:STRING=-O2 -g
-
-//C compiler.
-CMAKE_C_COMPILER:FILEPATH=/usr/bin/gcc
-
-//Flags used by the compiler during all build types.
-CMAKE_C_FLAGS:STRING=
-
-//Flags used by the compiler during debug builds.
-CMAKE_C_FLAGS_DEBUG:STRING=-g
-
-//Flags used by the compiler during release minsize builds.
-CMAKE_C_FLAGS_MINSIZEREL:STRING=-Os -DNDEBUG
-
-//Flags used by the compiler during release builds (/MD /Ob1 /Oi
-// /Ot /Oy /Gs will produce slightly less optimized but smaller
-// files).
-CMAKE_C_FLAGS_RELEASE:STRING=-O3 -DNDEBUG
-
-//Flags used by the compiler during Release with Debug Info builds.
-CMAKE_C_FLAGS_RELWITHDEBINFO:STRING=-O2 -g
-
-//Flags used by the linker.
-CMAKE_EXE_LINKER_FLAGS:STRING=
-
-//Flags used by the linker during debug builds.
-CMAKE_EXE_LINKER_FLAGS_DEBUG:STRING=
-
-//Flags used by the linker during release minsize builds.
-CMAKE_EXE_LINKER_FLAGS_MINSIZEREL:STRING=
-
-//Flags used by the linker during release builds.
-CMAKE_EXE_LINKER_FLAGS_RELEASE:STRING=
-
-//Flags used by the linker during Release with Debug Info builds.
-CMAKE_EXE_LINKER_FLAGS_RELWITHDEBINFO:STRING=
-
-//Use HP pthreads.
-CMAKE_HP_PTHREADS:BOOL=
-
-//Path to a program.
-CMAKE_INSTALL_NAME_TOOL:FILEPATH=/usr/bin/install_name_tool
-
-//Install path prefix, prepended onto install directories.
-CMAKE_INSTALL_PREFIX:PATH=@PREFIX_PATH@
-
-//Path to a program.
-CMAKE_LINKER:FILEPATH=/usr/bin/ld
-
-//Path to a program.
-CMAKE_MAKE_PROGRAM:FILEPATH=/usr/bin/make
-
-//Flags used by the linker during the creation of modules.
-CMAKE_MODULE_LINKER_FLAGS:STRING=
-
-//Flags used by the linker during debug builds.
-CMAKE_MODULE_LINKER_FLAGS_DEBUG:STRING=
-
-//Flags used by the linker during release minsize builds.
-CMAKE_MODULE_LINKER_FLAGS_MINSIZEREL:STRING=
-
-//Flags used by the linker during release builds.
-CMAKE_MODULE_LINKER_FLAGS_RELEASE:STRING=
-
-//Flags used by the linker during Release with Debug Info builds.
-CMAKE_MODULE_LINKER_FLAGS_RELWITHDEBINFO:STRING=
-
-//Path to a program.
-CMAKE_NM:FILEPATH=/usr/bin/nm
-
-//Path to a program.
-CMAKE_OBJCOPY:FILEPATH=CMAKE_OBJCOPY-NOTFOUND
-
-//Path to a program.
-CMAKE_OBJDUMP:FILEPATH=CMAKE_OBJDUMP-NOTFOUND
-
-//Build architectures for OSX
-CMAKE_OSX_ARCHITECTURES:STRING=
-
-//Minimum OS X version to target for deployment (at runtime); newer
-// APIs weak linked. Set to empty string for default value.
-CMAKE_OSX_DEPLOYMENT_TARGET:STRING=10.6
-
-//The product will be built against the headers and libraries located
-// inside the indicated SDK.
-CMAKE_OSX_SYSROOT:PATH=/Developer/SDKs/MacOSX10.6.sdk
-
-//Value Computed by CMake
-CMAKE_PROJECT_NAME:STATIC=VTK
-
-//Path to a program.
-CMAKE_RANLIB:FILEPATH=/usr/bin/ranlib
-
-//Flags used by the linker during the creation of dll's.
-CMAKE_SHARED_LINKER_FLAGS:STRING=
-
-//Flags used by the linker during debug builds.
-CMAKE_SHARED_LINKER_FLAGS_DEBUG:STRING=
-
-//Flags used by the linker during release minsize builds.
-CMAKE_SHARED_LINKER_FLAGS_MINSIZEREL:STRING=
-
-//Flags used by the linker during release builds.
-CMAKE_SHARED_LINKER_FLAGS_RELEASE:STRING=
-
-//Flags used by the linker during Release with Debug Info builds.
-CMAKE_SHARED_LINKER_FLAGS_RELWITHDEBINFO:STRING=
-
-//Path to a program.
-CMAKE_STRIP:FILEPATH=/usr/bin/strip
-
-//Thread library used.
-CMAKE_THREAD_LIBS:STRING=-lpthread
-
-//Use the pthreads library.
-CMAKE_USE_PTHREADS:BOOL=1
-
-//If true, cmake will use relative paths in makefiles and projects.
-CMAKE_USE_RELATIVE_PATHS:BOOL=OFF
-
-//Use sproc libs.
-CMAKE_USE_SPROC:BOOL=
-
-//Use the win32 thread library.
-CMAKE_USE_WIN32_THREADS:BOOL=
-
-//If this value is on, makefiles will be generated without the
-// .SILENT directive, and all commands will be echoed to the console
-// during the make.  This is useful for debugging only. With Visual
-// Studio IDE projects all commands are done without /nologo.
-CMAKE_VERBOSE_MAKEFILE:BOOL=FALSE
-
-//X11 extra flags.
-CMAKE_X_CFLAGS:STRING=
-
-//Libraries and options used in X11 programs.
-CMAKE_X_LIBS:STRING=/usr/X11R6/lib/libSM.dylib;/usr/X11R6/lib/libICE.dylib;/usr/X11R6/lib/libX11.dylib;/usr/X11R6/lib/libXext.dylib
-
-//Path to the coverage program that CTest uses for performing coverage
-// inspection
-COVERAGE_COMMAND:FILEPATH=/usr/bin/gcov
-
-//Path to a program.
-CVSCOMMAND:FILEPATH=/usr/bin/cvs
-
-//Options passed to the cvs update command.
-CVS_UPDATE_OPTIONS:STRING=-d -A -P
-
-//Maximum time allowed before CTest will kill the test.
-DART_TESTING_TIMEOUT:STRING=1500
-
-//Value Computed by CMake
-DICOMParser_BINARY_DIR:STATIC=@BUILD_DIR@/VTK-build/Utilities/DICOMParser
-
-//Value Computed by CMake
-DICOMParser_SOURCE_DIR:STATIC=@BUILD_DIR@/VTK/Utilities/DICOMParser
-
-//Path to a program.
-HGCOMMAND:FILEPATH=HGCOMMAND-NOTFOUND
-
-//Path to a library.
-IOKit:FILEPATH=/System/Library/Frameworks/IOKit.framework
-
-//Command used to build entire project from the command line.
-MAKECOMMAND:STRING=/usr/bin/make -i -j 16
-
-//Value Computed by CMake
-MAPREDUCE_BINARY_DIR:STATIC=@BUILD_DIR@/VTK-build/Utilities/mrmpi
-
-//Value Computed by CMake
-MAPREDUCE_SOURCE_DIR:STATIC=@BUILD_DIR@/VTK/Utilities/mrmpi
-
-//Path to the memory checking command, used for memory error detection.
-MEMORYCHECK_COMMAND:FILEPATH=MEMORYCHECK_COMMAND-NOTFOUND
-
-//File that contains suppressions for the memory checker
-MEMORYCHECK_SUPPRESSIONS_FILE:FILEPATH=
-
-//Dependencies for the target
-MapReduceMPI_LIB_DEPENDS:STATIC=general;mpistubs;
-
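The *_LIB_DEPENDS:STATIC entries that start here record each target's link line as keyword;library pairs, the keyword being one of CMake's per-configuration link keywords (general, debug, optimized); a plain target_link_libraries() call records everything under "general". A sketch under that assumption, using the target names from the entry above:

    # Sketch: how a <target>_LIB_DEPENDS cache entry of this vintage arises.
    # With no debug/optimized keyword, each library is recorded as "general":
    target_link_libraries(MapReduceMPI mpistubs)
    # => MapReduceMPI_LIB_DEPENDS:STATIC=general;mpistubs;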
-//Value Computed by CMake
-MaterialLibrary_BINARY_DIR:STATIC=@BUILD_DIR@/VTK-build/Utilities/MaterialLibrary
-
-//Value Computed by CMake
-MaterialLibrary_SOURCE_DIR:STATIC=@BUILD_DIR@/VTK/Utilities/MaterialLibrary
-
-//Include for OpenGL on OSX
-OPENGL_INCLUDE_DIR:PATH=/System/Library/Frameworks/OpenGL.framework
-
-//OpenGL lib for OSX
-OPENGL_gl_LIBRARY:FILEPATH=/System/Library/Frameworks/OpenGL.framework
-
-//AGL lib for OSX
-OPENGL_glu_LIBRARY:FILEPATH=/System/Library/Frameworks/AGL.framework
-
-//Does an external project define proj_list or should libproj4
-// define it?
-PROJ_LIST_EXTERNAL:BOOL=OFF
-
-//Should libproj4 include projection code that relies on GSL?
-PROJ_USE_GSL:BOOL=OFF
-
-//Should libproj4 be built as a thread-friendly library?
-PROJ_USE_PTHREADS:BOOL=OFF
-
-//Add module vtkChartsPython
-PYTHON_ENABLE_MODULE_vtkChartsPython:BOOL=ON
-
-//Add module vtkCommonPython
-PYTHON_ENABLE_MODULE_vtkCommonPython:BOOL=ON
-
-//Add module vtkFilteringPython
-PYTHON_ENABLE_MODULE_vtkFilteringPython:BOOL=ON
-
-//Add module vtkGenericFilteringPython
-PYTHON_ENABLE_MODULE_vtkGenericFilteringPython:BOOL=ON
-
-//Add module vtkGeovisPython
-PYTHON_ENABLE_MODULE_vtkGeovisPython:BOOL=ON
-
-//Add module vtkGraphicsPython
-PYTHON_ENABLE_MODULE_vtkGraphicsPython:BOOL=ON
-
-//Add module vtkHybridPython
-PYTHON_ENABLE_MODULE_vtkHybridPython:BOOL=ON
-
-//Add module vtkIOPython
-PYTHON_ENABLE_MODULE_vtkIOPython:BOOL=ON
-
-//Add module vtkImagingPython
-PYTHON_ENABLE_MODULE_vtkImagingPython:BOOL=ON
-
-//Add module vtkInfovisPython
-PYTHON_ENABLE_MODULE_vtkInfovisPython:BOOL=ON
-
-//Add module vtkRenderingPython
-PYTHON_ENABLE_MODULE_vtkRenderingPython:BOOL=ON
-
-//Add module vtkViewsPython
-PYTHON_ENABLE_MODULE_vtkViewsPython:BOOL=ON
-
-//Add module vtkVolumeRenderingPython
-PYTHON_ENABLE_MODULE_vtkVolumeRenderingPython:BOOL=ON
-
-//Add module vtkWidgetsPython
-PYTHON_ENABLE_MODULE_vtkWidgetsPython:BOOL=ON
-
-//Path to a program.
-PYTHON_EXECUTABLE:FILEPATH=@PREFIX_PATH@/bin/python@PYVER@
-
-//Extra libraries to link when linking to python (such as "z" for
-// zlib).  Separate multiple libraries with semicolons.
-PYTHON_EXTRA_LIBS:STRING=
-
-//Path to a file.
-PYTHON_INCLUDE_DIR:PATH=@PREFIX_PATH@/Python.framework/Headers
-
-//Path to a library.
-PYTHON_LIBRARY:FILEPATH=@PREFIX_PATH@/Python.framework/Versions/Current/lib/python@PYVER@/config/libpython@PYVER@.dylib
-
-//Add module vtkChartsPython shared
-PYTHON_MODULE_vtkChartsPython_BUILD_SHARED:BOOL=ON
-
-//Add module vtkCommonPython shared
-PYTHON_MODULE_vtkCommonPython_BUILD_SHARED:BOOL=ON
-
-//Add module vtkFilteringPython shared
-PYTHON_MODULE_vtkFilteringPython_BUILD_SHARED:BOOL=ON
-
-//Add module vtkGenericFilteringPython shared
-PYTHON_MODULE_vtkGenericFilteringPython_BUILD_SHARED:BOOL=ON
-
-//Add module vtkGeovisPython shared
-PYTHON_MODULE_vtkGeovisPython_BUILD_SHARED:BOOL=ON
-
-//Add module vtkGraphicsPython shared
-PYTHON_MODULE_vtkGraphicsPython_BUILD_SHARED:BOOL=ON
-
-//Add module vtkHybridPython shared
-PYTHON_MODULE_vtkHybridPython_BUILD_SHARED:BOOL=ON
-
-//Add module vtkIOPython shared
-PYTHON_MODULE_vtkIOPython_BUILD_SHARED:BOOL=ON
-
-//Add module vtkImagingPython shared
-PYTHON_MODULE_vtkImagingPython_BUILD_SHARED:BOOL=ON
-
-//Add module vtkInfovisPython shared
-PYTHON_MODULE_vtkInfovisPython_BUILD_SHARED:BOOL=ON
-
-//Add module vtkRenderingPython shared
-PYTHON_MODULE_vtkRenderingPython_BUILD_SHARED:BOOL=ON
-
-//Add module vtkViewsPython shared
-PYTHON_MODULE_vtkViewsPython_BUILD_SHARED:BOOL=ON
-
-//Add module vtkVolumeRenderingPython shared
-PYTHON_MODULE_vtkVolumeRenderingPython_BUILD_SHARED:BOOL=ON
-
-//Add module vtkWidgetsPython shared
-PYTHON_MODULE_vtkWidgetsPython_BUILD_SHARED:BOOL=ON
-
-//Utility library needed for vtkpython
-PYTHON_UTIL_LIBRARY:FILEPATH=/usr/lib/libutil.dylib
-
-//Path to scp command, used by CTest for submitting results to
-// a Dart server
-SCPCOMMAND:FILEPATH=/usr/bin/scp
-
-//Name of the computer/site where compile is being run
-SITE:STRING=meryem.llnl.gov
-
-//Path to the SLURM sbatch executable
-SLURM_SBATCH_COMMAND:FILEPATH=SLURM_SBATCH_COMMAND-NOTFOUND
-
-//Path to the SLURM srun executable
-SLURM_SRUN_COMMAND:FILEPATH=SLURM_SRUN_COMMAND-NOTFOUND
-
-//Path to a program.
-SVNCOMMAND:FILEPATH=/usr/bin/svn
-
-//Path to a file.
-TCL_INCLUDE_PATH:PATH=@EXTERNALS@/include
-
-//Path to a library.
-TCL_LIBRARY:FILEPATH=@EXTERNALS@/lib/libtcl@TCLTK_VERSION@.dylib
-
-//Path to a program.
-TCL_TCLSH:FILEPATH=@EXTERNALS@/bin/tclsh@TCLTK_VERSION@
-
-//Path to a file.
-TK_INCLUDE_PATH:PATH=@EXTERNALS@/include
-
-//The path to the Tk internal headers (tkMacOSXDefault.h).
-TK_INTERNAL_PATH:PATH=@EXTERNALS@/include
-
-//Path to a library.
-TK_LIBRARY:FILEPATH=@EXTERNALS@/lib/libtcl@TCLTK_VERSION@.dylib
-
-//Build the 2007 Verdict User Manual
-VERDICT_BUILD_DOC:BOOL=OFF
-
-//Should tests of the VERDICT library be built?
-VERDICT_ENABLE_TESTING:BOOL=OFF
-
-//Mangle verdict names for inclusion in a larger library?
-VERDICT_MANGLE:BOOL=ON
-
-//VTK requires the verdict prefix to be vtk
-VERDICT_MANGLE_PREFIX:STRING=vtk
-
-//VTK requires doubles
-VERDICT_USE_FLOAT:BOOL=OFF
-
-//Path to a library.
-VLI_LIBRARY_FOR_VP1000:FILEPATH=VLI_LIBRARY_FOR_VP1000-NOTFOUND
-
-//Value Computed by CMake
-VTKEXPAT_BINARY_DIR:STATIC=@BUILD_DIR@/VTK-build/Utilities/vtkexpat
-
-//Value Computed by CMake
-VTKEXPAT_SOURCE_DIR:STATIC=@BUILD_DIR@/VTK/Utilities/vtkexpat
-
-//Value Computed by CMake
-VTKFREETYPE_BINARY_DIR:STATIC=@BUILD_DIR@/VTK-build/Utilities/vtkfreetype
-
-//Value Computed by CMake
-VTKFREETYPE_SOURCE_DIR:STATIC=@BUILD_DIR@/VTK/Utilities/vtkfreetype
-
-//Value Computed by CMake
-VTKFTGL_BINARY_DIR:STATIC=@BUILD_DIR@/VTK-build/Utilities/ftgl
-
-//Value Computed by CMake
-VTKFTGL_SOURCE_DIR:STATIC=@BUILD_DIR@/VTK/Utilities/ftgl
-
-//Value Computed by CMake
-VTKJPEG_BINARY_DIR:STATIC=@BUILD_DIR@/VTK-build/Utilities/vtkjpeg
-
-//Value Computed by CMake
-VTKJPEG_SOURCE_DIR:STATIC=@BUILD_DIR@/VTK/Utilities/vtkjpeg
-
-//Value Computed by CMake
-VTKNETCDF_BINARY_DIR:STATIC=@BUILD_DIR@/VTK-build/Utilities/vtknetcdf
-
-//Value Computed by CMake
-VTKNETCDF_SOURCE_DIR:STATIC=@BUILD_DIR@/VTK/Utilities/vtknetcdf
-
-//Value Computed by CMake
-VTKPNG_BINARY_DIR:STATIC=@BUILD_DIR@/VTK-build/Utilities/vtkpng
-
-//Value Computed by CMake
-VTKPNG_SOURCE_DIR:STATIC=@BUILD_DIR@/VTK/Utilities/vtkpng
-
-//Value Computed by CMake
-VTKTIFF_BINARY_DIR:STATIC=@BUILD_DIR@/VTK-build/Utilities/vtktiff
-
-//Value Computed by CMake
-VTKTIFF_SOURCE_DIR:STATIC=@BUILD_DIR@/VTK/Utilities/vtktiff
-
-//Value Computed by CMake
-VTKZLIB_BINARY_DIR:STATIC=@BUILD_DIR@/VTK-build/Utilities/vtkzlib
-
-//Value Computed by CMake
-VTKZLIB_SOURCE_DIR:STATIC=@BUILD_DIR@/VTK/Utilities/vtkzlib
-
-//Value Computed by CMake
-VTK_BINARY_DIR:STATIC=@BUILD_DIR@/VTK-build
-
-//The repository for data used for testing.  To obtain from CVS:
-// "cvs -d :pserver:anoncvs@www.vtk.org:/cvsroot/VTK co VTKData"
-VTK_DATA_ROOT:PATH=VTK_DATA_ROOT-NOTFOUND
-
-//Build leak checking support into VTK.
-VTK_DEBUG_LEAKS:BOOL=OFF
-
-//Add compiler flags to do stricter checking when building debug.
-VTK_EXTRA_COMPILER_WARNINGS:BOOL=OFF
-
-//Location of the OpenGL extensions header file (glext.h).
-VTK_GLEXT_FILE:FILEPATH=@BUILD_DIR@/VTK/Utilities/ParseOGLExt/headers/glext.h
-
-//Location of the GLX extensions header file (glxext.h).
-VTK_GLXEXT_FILE:FILEPATH=@BUILD_DIR@/VTK/Utilities/ParseOGLExt/headers/glxext.h
-
-//The repository for large data used for testing.  To check out
-// this repository from CVS, first run
-//\n
-//\ncvs -d :pserver:anonymous@public.kitware.com:/cvsroot/VTK login
-//\n
-//\n(respond with password vtk) and then run
-//\n
-//\ncvs -d :pserver:anonymous@public.kitware.com:/cvsroot/VTK checkout
-// VTKLargeData
-VTK_LARGE_DATA_ROOT:PATH=VTK_LARGE_DATA_ROOT-NOTFOUND
-
-//Remove all legacy code completely.
-VTK_LEGACY_REMOVE:BOOL=OFF
-
-//Silence all legacy code messages.
-VTK_LEGACY_SILENT:BOOL=OFF
-
-//; separated directories to search for materials/shaders
-VTK_MATERIALS_DIRS:STRING=@BUILD_DIR@/VTK-build/Utilities/MaterialLibrary/Repository
-
-//Disable multithreading support in the Python bindings
-VTK_NO_PYTHON_THREADS:BOOL=OFF
-
-//The opengl library being used supports off screen Mesa calls.
-VTK_OPENGL_HAS_OSMESA:BOOL=OFF
-
-//Arguments passed to "python setup.py install ..." during installation.
-VTK_PYTHON_SETUP_ARGS:STRING=--prefix="${CMAKE_INSTALL_PREFIX}"
-
-//Extra flags for Objective C compilation
-VTK_REQUIRED_OBJCXX_FLAGS:STRING=-fobjc-gc
-
-//Value Computed by CMake
-VTK_SOURCE_DIR:STATIC=@BUILD_DIR@/VTK
-
-//Locale for VTK C++ tests. Example fr_FR.utf8, empty string for
-// env variable.
-VTK_TESTING_LOCALE:STRING=
-
-//VTK tests call vtkFloatingPointExceptions::Enable()
-VTK_TESTING_USE_FPE:BOOL=ON
-
-//VTK c++ tests will start with specified locale.
-VTK_TESTING_USE_LOCALE:BOOL=OFF
-
-//Build VTK with 64 bit ids
-VTK_USE_64BIT_IDS:BOOL=ON
-
-//Use Boost libraries for graph algorithms - www.boost.org.
-VTK_USE_BOOST:BOOL=OFF
-
-//Build classes using Carbon API.
-VTK_USE_CARBON:BOOL=OFF
-
-//Build pixel and vertex shader support for Cg.
-VTK_USE_CG_SHADERS:BOOL=OFF
-
-//Build VTK chart support (OpenGL based)
-VTK_USE_CHARTS:BOOL=ON
-
-//Build classes using Cocoa API.
-VTK_USE_COCOA:BOOL=ON
-
-//Turn this option off and tests and warning/error macros will
-// not popup windows
-VTK_USE_DISPLAY:BOOL=ON
-
-//If the FFMPEG library is available, should VTK use it for saving
-// .avi animation files?
-VTK_USE_FFMPEG_ENCODER:BOOL=OFF
-
-//Use GCC visibility support if available.
-VTK_USE_GCC_VISIBILITY:BOOL=ON
-
-//Build the vtkGeovis kit.  Needed for performing geographic visualization.
-VTK_USE_GEOVIS:BOOL=ON
-
-//Build VTK with gl2ps support.
-VTK_USE_GL2PS:BOOL=ON
-
-//Build pixel and vertex shader support for GLSL.
-VTK_USE_GLSL_SHADERS:BOOL=ON
-
-//Use Gnu R interface for VTK.  Requires Gnu R installation.
-VTK_USE_GNU_R:BOOL=OFF
-
-//Build VTK with GUI Support
-VTK_USE_GUISUPPORT:BOOL=OFF
-
-//Build the vtkInfovis kit.  Needed for performing information
-// visualization.
-VTK_USE_INFOVIS:BOOL=ON
-
-//Use mangled Mesa with OpenGL.
-VTK_USE_MANGLED_MESA:BOOL=OFF
-
-//Use Matlab Engine and Matlab Mex files.  Requires valid Matlab
-// installation.
-VTK_USE_MATLAB_MEX:BOOL=OFF
-
-//Build metaio
-VTK_USE_METAIO:BOOL=ON
-
-//Enable use of the patented mpeg2 library. You are solely responsible
-// for any legal issues associated with using patented code in
-// your software.
-VTK_USE_MPEG2_ENCODER:BOOL=OFF
-
-//Build the MySQL driver for vtkSQLDatabase.
-VTK_USE_MYSQL:BOOL=OFF
-
-//Add support for arbitrary-dimension sparse and dense arrays.
-VTK_USE_N_WAY_ARRAYS:BOOL=ON
-
-//Build the ODBC database interface
-VTK_USE_ODBC:BOOL=OFF
-
-//Build experimental Ogg/Theora support
-VTK_USE_OGGTHEORA_ENCODER:BOOL=OFF
-
-//Build openfoam reader
-VTK_USE_OPENFOAM:BOOL=ON
-
-//Build the vtkParallel kit.
-VTK_USE_PARALLEL:BOOL=OFF
-
-//Build the PostgreSQL driver for vtkSQLDatabase.
-VTK_USE_POSTGRES:BOOL=OFF
-
-//Build Qt support
-VTK_USE_QT:BOOL=OFF
-
-//Build the vtkRendering kit.  Needed for displaying data or using
-// widgets.
-VTK_USE_RENDERING:BOOL=ON
-
-//Build shared libraries with rpath.  This makes it easy to run
-// executables from the build tree when using shared libraries,
-// but removes install support.
-VTK_USE_RPATH:BOOL=ON
-
-//Use the system's expat library.
-VTK_USE_SYSTEM_EXPAT:BOOL=OFF
-
-//Use the system's freetype library.
-VTK_USE_SYSTEM_FREETYPE:BOOL=OFF
-
-//Use the system's gl2ps library.
-VTK_USE_SYSTEM_GL2PS:BOOL=OFF
-
-//Use the system's jpeg library.
-VTK_USE_SYSTEM_JPEG:BOOL=OFF
-
-//Use the system's proj4 library.
-VTK_USE_SYSTEM_LIBPROJ4:BOOL=OFF
-
-//Use the system's libxml2 library.
-VTK_USE_SYSTEM_LIBXML2:BOOL=OFF
-
-//Use the system's png library.
-VTK_USE_SYSTEM_PNG:BOOL=OFF
-
-//Use the system's tiff library.
-VTK_USE_SYSTEM_TIFF:BOOL=OFF
-
-//Use the system's zlib library.
-VTK_USE_SYSTEM_ZLIB:BOOL=OFF
-
-//Use 3Dconnexion device
-VTK_USE_TDX:BOOL=OFF
-
-//Build the vtkTextAnalysis kit.  Needed for performing text analysis.
-VTK_USE_TEXT_ANALYSIS:BOOL=OFF
-
-//Build VTK with Tk support
-VTK_USE_TK:BOOL=OFF
-
-//Build the vtkViews kit.  Needed for creating packaged and linked
-// views.
-VTK_USE_VIEWS:BOOL=ON
-
-//Enable support for VolumePro 1000.
-VTK_USE_VOLUMEPRO_1000:BOOL=OFF
-
-//Build classes for the X11 window system.
-VTK_USE_X:BOOL=OFF
-
-//Location of the WGL extensions header file (wglext.h).
-VTK_WGLEXT_FILE:FILEPATH=@BUILD_DIR@/VTK/Utilities/ParseOGLExt/headers/wglext.h
-
-//Path to a file.
-VTK_WRAP_HINTS:FILEPATH=@BUILD_DIR@/VTK/Wrapping/hints
-
-//Wrap VTK classes into the Java language.
-VTK_WRAP_JAVA:BOOL=OFF
-
-//Wrap VTK classes into the Python language.
-VTK_WRAP_PYTHON:BOOL=ON
-
-//Wrap VTK classes into the TCL language.
-VTK_WRAP_TCL:BOOL=OFF
-
-//Path to a file.
-X11_ICE_INCLUDE_PATH:PATH=/usr/include
-
-//Path to a library.
-X11_ICE_LIB:FILEPATH=/usr/X11R6/lib/libICE.dylib
-
-//Path to a library.
-X11_SM_LIB:FILEPATH=/usr/X11R6/lib/libSM.dylib
-
-//Path to a file.
-X11_X11_INCLUDE_PATH:PATH=/usr/include
-
-//Path to a library.
-X11_X11_LIB:FILEPATH=/usr/X11R6/lib/libX11.dylib
-
-//Path to a file.
-X11_XShm_INCLUDE_PATH:PATH=/usr/include
-
-//Path to a file.
-X11_XTest_INCLUDE_PATH:PATH=/usr/include
-
-//Path to a library.
-X11_XTest_LIB:FILEPATH=/usr/X11R6/lib/libXtst.dylib
-
-//Path to a file.
-X11_Xaccessrules_INCLUDE_PATH:PATH=/usr/include
-
-//Path to a file.
-X11_Xaccessstr_INCLUDE_PATH:PATH=/usr/include
-
-//Path to a file.
-X11_Xau_INCLUDE_PATH:PATH=/usr/include
-
-//Path to a library.
-X11_Xau_LIB:FILEPATH=/usr/X11R6/lib/libXau.dylib
-
-//Path to a file.
-X11_Xcomposite_INCLUDE_PATH:PATH=/usr/include
-
-//Path to a library.
-X11_Xcomposite_LIB:FILEPATH=/usr/X11R6/lib/libXcomposite.dylib
-
-//Path to a file.
-X11_Xcursor_INCLUDE_PATH:PATH=/usr/include
-
-//Path to a library.
-X11_Xcursor_LIB:FILEPATH=/usr/X11R6/lib/libXcursor.dylib
-
-//Path to a file.
-X11_Xdamage_INCLUDE_PATH:PATH=/usr/include
-
-//Path to a library.
-X11_Xdamage_LIB:FILEPATH=/usr/X11R6/lib/libXdamage.dylib
-
-//Path to a file.
-X11_Xdmcp_INCLUDE_PATH:PATH=/usr/include
-
-//Path to a library.
-X11_Xdmcp_LIB:FILEPATH=/usr/X11R6/lib/libXdmcp.dylib
-
-//Path to a library.
-X11_Xext_LIB:FILEPATH=/usr/X11R6/lib/libXext.dylib
-
-//Path to a file.
-X11_Xfixes_INCLUDE_PATH:PATH=/usr/include
-
-//Path to a library.
-X11_Xfixes_LIB:FILEPATH=/usr/X11R6/lib/libXfixes.dylib
-
-//Path to a file.
-X11_Xft_INCLUDE_PATH:PATH=/usr/include
-
-//Path to a library.
-X11_Xft_LIB:FILEPATH=/usr/X11R6/lib/libXft.dylib
-
-//Path to a file.
-X11_Xinerama_INCLUDE_PATH:PATH=/usr/include
-
-//Path to a library.
-X11_Xinerama_LIB:FILEPATH=/usr/X11R6/lib/libXinerama.dylib
-
-//Path to a file.
-X11_Xinput_INCLUDE_PATH:PATH=/usr/include
-
-//Path to a library.
-X11_Xinput_LIB:FILEPATH=/usr/X11R6/lib/libXi.dylib
-
-//Path to a file.
-X11_Xkb_INCLUDE_PATH:PATH=/usr/include
-
-//Path to a file.
-X11_Xkblib_INCLUDE_PATH:PATH=/usr/include
-
-//Path to a file.
-X11_Xlib_INCLUDE_PATH:PATH=/usr/include
-
-//Path to a file.
-X11_Xpm_INCLUDE_PATH:PATH=/usr/include
-
-//Path to a library.
-X11_Xpm_LIB:FILEPATH=/usr/X11R6/lib/libXpm.dylib
-
-//Path to a file.
-X11_Xrandr_INCLUDE_PATH:PATH=/usr/include
-
-//Path to a library.
-X11_Xrandr_LIB:FILEPATH=/usr/X11R6/lib/libXrandr.dylib
-
-//Path to a file.
-X11_Xrender_INCLUDE_PATH:PATH=/usr/include
-
-//Path to a library.
-X11_Xrender_LIB:FILEPATH=/usr/X11R6/lib/libXrender.dylib
-
-//Path to a file.
-X11_Xscreensaver_INCLUDE_PATH:PATH=/usr/include
-
-//Path to a library.
-X11_Xscreensaver_LIB:FILEPATH=/usr/X11R6/lib/libXss.dylib
-
-//Path to a file.
-X11_Xshape_INCLUDE_PATH:PATH=/usr/include
-
-//Path to a file.
-X11_Xt_INCLUDE_PATH:PATH=/usr/include
-
-//Path to a library.
-X11_Xt_LIB:FILEPATH=/usr/X11R6/lib/libXt.dylib
-
-//Path to a file.
-X11_Xutil_INCLUDE_PATH:PATH=/usr/include
-
-//Path to a file.
-X11_Xv_INCLUDE_PATH:PATH=/usr/include
-
-//Path to a library.
-X11_Xv_LIB:FILEPATH=/usr/X11R6/lib/libXv.dylib
-
-//Path to a library.
-X11_Xxf86misc_LIB:FILEPATH=/usr/X11R6/lib/libXxf86misc.dylib
-
-//Path to a file.
-X11_dpms_INCLUDE_PATH:PATH=/usr/include
-
-//Path to a file.
-X11_xf86misc_INCLUDE_PATH:PATH=/usr/include
-
-//Path to a file.
-X11_xf86vmode_INCLUDE_PATH:PATH=/usr/include
-
-//Value Computed by CMake
-alglib_BINARY_DIR:STATIC=@BUILD_DIR@/VTK-build/Utilities/vtkalglib
-
-//Value Computed by CMake
-alglib_SOURCE_DIR:STATIC=@BUILD_DIR@/VTK/Utilities/vtkalglib
-
-//Value Computed by CMake
-libproj4_BINARY_DIR:STATIC=@BUILD_DIR@/VTK-build/Utilities/vtklibproj4
-
-//Value Computed by CMake
-libproj4_SOURCE_DIR:STATIC=@BUILD_DIR@/VTK/Utilities/vtklibproj4
-
-//Dependencies for the target
-mpistubs_LIB_DEPENDS:STATIC=general;vtksys;
-
-//Value Computed by CMake
-verdict_BINARY_DIR:STATIC=@BUILD_DIR@/VTK-build/Utilities/verdict
-
-//Value Computed by CMake
-verdict_SOURCE_DIR:STATIC=@BUILD_DIR@/VTK/Utilities/verdict
-
-//Dependencies for the target
-vtkChartsPythonD_LIB_DEPENDS:STATIC=general;vtkCharts;general;vtkHybridPythonD;general;vtkViewsPythonD;general;@PREFIX_PATH@/Python.framework/Versions/Current/lib/python@PYVER@/config/libpython@PYVER@.dylib;
-
-//Dependencies for the target
-vtkChartsPython_LIB_DEPENDS:STATIC=general;vtkChartsPythonD;general;@PREFIX_PATH@/Python.framework/Versions/Current/lib/python@PYVER@/config/libpython@PYVER@.dylib;
-
-//Dependencies for the target
-vtkCharts_LIB_DEPENDS:STATIC=general;vtkHybrid;general;vtkViews;general;vtkIO;general;vtkftgl;general;vtkfreetype;
-
-//Dependencies for the target
-vtkCommonPythonD_LIB_DEPENDS:STATIC=general;vtkCommon;general;@PREFIX_PATH@/Python.framework/Versions/Current/lib/python@PYVER@/config/libpython@PYVER@.dylib;
-
-//Dependencies for the target
-vtkCommonPython_LIB_DEPENDS:STATIC=general;vtkCommonPythonD;general;@PREFIX_PATH@/Python.framework/Versions/Current/lib/python@PYVER@/config/libpython@PYVER@.dylib;
-
-//Dependencies for the target
-vtkCommon_LIB_DEPENDS:STATIC=general;vtksys;general;-lm;general;-lpthread;
-
-//Dependencies for target
-vtkDICOMParser_LIB_DEPENDS:STATIC=
-
-//Value Computed by CMake
-vtkExodus2_BINARY_DIR:STATIC=@BUILD_DIR@/VTK-build/Utilities/vtkexodus2
-
-//Value Computed by CMake
-vtkExodus2_SOURCE_DIR:STATIC=@BUILD_DIR@/VTK/Utilities/vtkexodus2
-
-//Dependencies for the target
-vtkFilteringPythonD_LIB_DEPENDS:STATIC=general;vtkFiltering;general;vtkCommonPythonD;general;@PREFIX_PATH@/Python.framework/Versions/Current/lib/python@PYVER@/config/libpython@PYVER@.dylib;
-
-//Dependencies for the target
-vtkFilteringPython_LIB_DEPENDS:STATIC=general;vtkFilteringPythonD;general;@PREFIX_PATH@/Python.framework/Versions/Current/lib/python@PYVER@/config/libpython@PYVER@.dylib;
-
-//Dependencies for the target
-vtkFiltering_LIB_DEPENDS:STATIC=general;vtkCommon;
-
-//Dependencies for the target
-vtkGenericFilteringPythonD_LIB_DEPENDS:STATIC=general;vtkGenericFiltering;general;vtkFilteringPythonD;general;vtkGraphicsPythonD;general;@PREFIX_PATH@/Python.framework/Versions/Current/lib/python@PYVER@/config/libpython@PYVER@.dylib;
-
-//Dependencies for the target
-vtkGenericFilteringPython_LIB_DEPENDS:STATIC=general;vtkGenericFilteringPythonD;general;@PREFIX_PATH@/Python.framework/Versions/Current/lib/python@PYVER@/config/libpython@PYVER@.dylib;
-
-//Dependencies for the target
-vtkGenericFiltering_LIB_DEPENDS:STATIC=general;vtkFiltering;general;vtkGraphics;
-
-//Dependencies for the target
-vtkGeovisPythonD_LIB_DEPENDS:STATIC=general;vtkGeovis;general;vtkWidgetsPythonD;general;vtkViewsPythonD;general;@PREFIX_PATH@/Python.framework/Versions/Current/lib/python@PYVER@/config/libpython@PYVER@.dylib;
-
-//Dependencies for the target
-vtkGeovisPython_LIB_DEPENDS:STATIC=general;vtkGeovisPythonD;general;@PREFIX_PATH@/Python.framework/Versions/Current/lib/python@PYVER@/config/libpython@PYVER@.dylib;
-
-//Dependencies for the target
-vtkGeovis_LIB_DEPENDS:STATIC=general;vtkWidgets;general;vtkViews;general;vtkproj4;general;/System/Library/Frameworks/AGL.framework;general;/System/Library/Frameworks/OpenGL.framework;
-
-//Dependencies for the target
-vtkGraphicsPythonD_LIB_DEPENDS:STATIC=general;vtkGraphics;general;vtkFilteringPythonD;general;@PREFIX_PATH@/Python.framework/Versions/Current/lib/python@PYVER@/config/libpython@PYVER@.dylib;
-
-//Dependencies for the target
-vtkGraphicsPython_LIB_DEPENDS:STATIC=general;vtkGraphicsPythonD;general;@PREFIX_PATH@/Python.framework/Versions/Current/lib/python@PYVER@/config/libpython@PYVER@.dylib;
-
-//Dependencies for the target
-vtkGraphics_LIB_DEPENDS:STATIC=general;vtkFiltering;general;vtkverdict;
-
-//Dependencies for the target
-vtkHybridPythonD_LIB_DEPENDS:STATIC=general;vtkHybrid;general;vtkRenderingPythonD;general;vtkIOPythonD;general;@PREFIX_PATH@/Python.framework/Versions/Current/lib/python@PYVER@/config/libpython@PYVER@.dylib;
-
-//Dependencies for the target
-vtkHybridPython_LIB_DEPENDS:STATIC=general;vtkHybridPythonD;general;@PREFIX_PATH@/Python.framework/Versions/Current/lib/python@PYVER@/config/libpython@PYVER@.dylib;
-
-//Dependencies for the target
-vtkHybrid_LIB_DEPENDS:STATIC=general;vtkRendering;general;vtkIO;general;vtkexoIIc;general;vtkftgl;
-
-//Dependencies for the target
-vtkIOPythonD_LIB_DEPENDS:STATIC=general;vtkIO;general;vtkFilteringPythonD;general;@PREFIX_PATH@/Python.framework/Versions/Current/lib/python@PYVER@/config/libpython@PYVER@.dylib;
-
-//Dependencies for the target
-vtkIOPython_LIB_DEPENDS:STATIC=general;vtkIOPythonD;general;@PREFIX_PATH@/Python.framework/Versions/Current/lib/python@PYVER@/config/libpython@PYVER@.dylib;
-
-//Dependencies for the target
-vtkIO_LIB_DEPENDS:STATIC=general;vtkFiltering;general;vtkDICOMParser;general;vtkNetCDF;general;vtkmetaio;general;vtksqlite;general;vtkpng;general;vtkzlib;general;vtkjpeg;general;vtktiff;general;vtkexpat;general;vtksys;
-
-//Dependencies for the target
-vtkImagingPythonD_LIB_DEPENDS:STATIC=general;vtkImaging;general;vtkFilteringPythonD;general;@PREFIX_PATH@/Python.framework/Versions/Current/lib/python@PYVER@/config/libpython@PYVER@.dylib;
-
-//Dependencies for the target
-vtkImagingPython_LIB_DEPENDS:STATIC=general;vtkImagingPythonD;general;@PREFIX_PATH@/Python.framework/Versions/Current/lib/python@PYVER@/config/libpython@PYVER@.dylib;
-
-//Dependencies for the target
-vtkImaging_LIB_DEPENDS:STATIC=general;vtkFiltering;
-
-//Dependencies for the target
-vtkInfovisPythonD_LIB_DEPENDS:STATIC=general;vtkInfovis;general;vtkWidgetsPythonD;general;@PREFIX_PATH@/Python.framework/Versions/Current/lib/python@PYVER@/config/libpython@PYVER@.dylib;
-
-//Dependencies for the target
-vtkInfovisPython_LIB_DEPENDS:STATIC=general;vtkInfovisPythonD;general;@PREFIX_PATH@/Python.framework/Versions/Current/lib/python@PYVER@/config/libpython@PYVER@.dylib;
-
-//Dependencies for the target
-vtkInfovis_LIB_DEPENDS:STATIC=general;vtkWidgets;general;vtklibxml2;general;vtkalglib;
-
-//Dependencies for target
-vtkNetCDF_LIB_DEPENDS:STATIC=
-
-//Dependencies for the target
-vtkRenderingPythonD_LIB_DEPENDS:STATIC=general;vtkRendering;general;vtkGraphicsPythonD;general;vtkImagingPythonD;general;@PREFIX_PATH@/Python.framework/Versions/Current/lib/python@PYVER@/config/libpython@PYVER@.dylib;
-
-//Dependencies for the target
-vtkRenderingPythonTkWidgets_LIB_DEPENDS:STATIC=general;vtkRendering;general;@EXTERNALS@/lib/libtcl@TCLTK_VERSION@.dylib;general;@EXTERNALS@/lib/libtcl@TCLTK_VERSION@.dylib;general;m;
-
-//Dependencies for the target
-vtkRenderingPython_LIB_DEPENDS:STATIC=general;vtkRenderingPythonD;general;@PREFIX_PATH@/Python.framework/Versions/Current/lib/python@PYVER@/config/libpython@PYVER@.dylib;
-
-//Dependencies for the target
-vtkRendering_LIB_DEPENDS:STATIC=general;vtkGraphics;general;vtkImaging;general;objc;general;vtkIO;general;vtkftgl;general;vtkfreetype;general;vtkzlib;general;vtkpng;general;/System/Library/Frameworks/OpenGL.framework;general;-framework Cocoa;general;/System/Library/Frameworks/ApplicationServices.framework;general;/System/Library/Frameworks/IOKit.framework;
-
-//Dependencies for the target
-vtkViewsPythonD_LIB_DEPENDS:STATIC=general;vtkViews;general;vtkInfovisPythonD;general;@PREFIX_PATH@/Python.framework/Versions/Current/lib/python@PYVER@/config/libpython@PYVER@.dylib;
-
-//Dependencies for the target
-vtkViewsPython_LIB_DEPENDS:STATIC=general;vtkViewsPythonD;general;@PREFIX_PATH@/Python.framework/Versions/Current/lib/python@PYVER@/config/libpython@PYVER@.dylib;
-
-//Dependencies for the target
-vtkViews_LIB_DEPENDS:STATIC=general;vtkInfovis;
-
-//Dependencies for the target
-vtkVolumeRenderingPythonD_LIB_DEPENDS:STATIC=general;vtkVolumeRendering;general;vtkRenderingPythonD;general;vtkIOPythonD;general;@PREFIX_PATH@/Python.framework/Versions/Current/lib/python@PYVER@/config/libpython@PYVER@.dylib;
-
-//Dependencies for the target
-vtkVolumeRenderingPython_LIB_DEPENDS:STATIC=general;vtkVolumeRenderingPythonD;general;@PREFIX_PATH@/Python.framework/Versions/Current/lib/python@PYVER@/config/libpython@PYVER@.dylib;
-
-//Dependencies for the target
-vtkVolumeRendering_LIB_DEPENDS:STATIC=general;vtkRendering;general;vtkIO;general;/System/Library/Frameworks/OpenGL.framework;
-
-//Dependencies for the target
-vtkWidgetsPythonD_LIB_DEPENDS:STATIC=general;vtkWidgets;general;vtkRenderingPythonD;general;vtkHybridPythonD;general;@PREFIX_PATH@/Python.framework/Versions/Current/lib/python@PYVER@/config/libpython@PYVER@.dylib;
-
-//Dependencies for the target
-vtkWidgetsPython_LIB_DEPENDS:STATIC=general;vtkWidgetsPythonD;general;@PREFIX_PATH@/Python.framework/Versions/Current/lib/python@PYVER@/config/libpython@PYVER@.dylib;
-
-//Dependencies for the target
-vtkWidgets_LIB_DEPENDS:STATIC=general;vtkRendering;general;vtkHybrid;general;/System/Library/Frameworks/OpenGL.framework;
-
-//Dependencies for target
-vtkalglib_LIB_DEPENDS:STATIC=
-
-//Dependencies for the target
-vtkexoIIc_LIB_DEPENDS:STATIC=general;vtkNetCDF;
-
-//Dependencies for target
-vtkexpat_LIB_DEPENDS:STATIC=
-
-//Dependencies for the target
-vtkfreetype_LIB_DEPENDS:STATIC=general;-framework ApplicationServices -framework CoreServices;
-
-//Dependencies for the target
-vtkftgl_LIB_DEPENDS:STATIC=general;/System/Library/Frameworks/OpenGL.framework;general;vtkfreetype;
-
-//Dependencies for target
-vtkjpeg_LIB_DEPENDS:STATIC=
-
-//Value Computed by CMake
-vtklibxml2_BINARY_DIR:STATIC=@BUILD_DIR@/VTK-build/Utilities/vtklibxml2
-
-//Dependencies for the target
-vtklibxml2_LIB_DEPENDS:STATIC=general;vtkzlib;general;dl;general;-lpthread;general;m;
-
-//Value Computed by CMake
-vtklibxml2_SOURCE_DIR:STATIC=@BUILD_DIR@/VTK/Utilities/vtklibxml2
-
-//Value Computed by CMake
-vtkmetaio_BINARY_DIR:STATIC=@BUILD_DIR@/VTK-build/Utilities/vtkmetaio
-
-//Dependencies for the target
-vtkmetaio_LIB_DEPENDS:STATIC=general;vtkzlib;general;vtksys;
-
-//Value Computed by CMake
-vtkmetaio_SOURCE_DIR:STATIC=@BUILD_DIR@/VTK/Utilities/vtkmetaio
-
-//Dependencies for the target
-vtkpng_LIB_DEPENDS:STATIC=general;vtkzlib;general;-lm;
-
-//Dependencies for the target
-vtkproj4_LIB_DEPENDS:STATIC=general;m;
-
-//Dependencies for the target
-vtksqlite_LIB_DEPENDS:STATIC=general;-lpthread;
-
-//Value Computed by CMake
-vtksys_BINARY_DIR:STATIC=@BUILD_DIR@/VTK-build/Utilities/kwsys
-
-//Dependencies for target
-vtksys_LIB_DEPENDS:STATIC=
-
-//Value Computed by CMake
-vtksys_SOURCE_DIR:STATIC=@BUILD_DIR@/VTK/Utilities/kwsys
-
-//Dependencies for the target
-vtktiff_LIB_DEPENDS:STATIC=general;vtkzlib;general;vtkjpeg;general;-lm;
-
-//Dependencies for target
-vtkverdict_LIB_DEPENDS:STATIC=
-
-//Dependencies for target
-vtkzlib_LIB_DEPENDS:STATIC=
-
-
-########################
-# INTERNAL cache entries
-########################
-
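This INTERNAL section is bookkeeping CMake keeps for itself: VAR-ADVANCED:INTERNAL=1 records that the project called mark_as_advanced(VAR), and VAR-MODIFIED:INTERNAL=ON flags values a user changed from the computed default. A short sketch of the project-side code behind the BUILD_DOCUMENTATION entries seen here:

    # option() creates the BOOL cache entry; mark_as_advanced() hides it from
    # the basic ccmake/cmake-gui view and records VAR-ADVANCED:INTERNAL=1.
    option(BUILD_DOCUMENTATION "Build the documentation (Doxygen)." OFF)
    mark_as_advanced(BUILD_DOCUMENTATION)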
-ALGLIB_SHARED_LIB:INTERNAL=ON
-//ADVANCED property for variable: BUILD_DOCUMENTATION
-BUILD_DOCUMENTATION-ADVANCED:INTERNAL=1
-//MODIFIED property for variable: BUILD_SHARED_LIBS
-BUILD_SHARED_LIBS-MODIFIED:INTERNAL=ON
-//ADVANCED property for variable: BZRCOMMAND
-BZRCOMMAND-ADVANCED:INTERNAL=1
-//Result of TRY_COMPILE
-CMAKE_ANSI_FOR_SCOPE:INTERNAL=TRUE
-//Have include iostream
-CMAKE_ANSI_STREAM_HEADERS:INTERNAL=1
-//ADVANCED property for variable: CMAKE_AR
-CMAKE_AR-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: CMAKE_BUILD_TOOL
-CMAKE_BUILD_TOOL-ADVANCED:INTERNAL=1
-//What is the target build tool cmake is generating for.
-CMAKE_BUILD_TOOL:INTERNAL=/usr/bin/make
-//This is the directory where this CMakeCache.txt was created
-CMAKE_CACHEFILE_DIR:INTERNAL=@BUILD_DIR@/VTK-build
-//Major version of cmake used to create the current loaded cache
-CMAKE_CACHE_MAJOR_VERSION:INTERNAL=2
-//Minor version of cmake used to create the current loaded cache
-CMAKE_CACHE_MINOR_VERSION:INTERNAL=8
-//Patch version of cmake used to create the current loaded cache
-CMAKE_CACHE_PATCH_VERSION:INTERNAL=0
-//ADVANCED property for variable: CMAKE_COLOR_MAKEFILE
-CMAKE_COLOR_MAKEFILE-ADVANCED:INTERNAL=1
-//Path to CMake executable.
-CMAKE_COMMAND:INTERNAL=@EXTERNALS@/bin/cmake
-//Path to cpack program executable.
-CMAKE_CPACK_COMMAND:INTERNAL=@EXTERNALS@/bin/cpack
-//ADVANCED property for variable: CMAKE_CTEST_COMMAND
-CMAKE_CTEST_COMMAND-ADVANCED:INTERNAL=1
-//Path to ctest program executable.
-CMAKE_CTEST_COMMAND:INTERNAL=@EXTERNALS@/bin/ctest
-//ADVANCED property for variable: CMAKE_CXX_COMPILER
-CMAKE_CXX_COMPILER-ADVANCED:INTERNAL=1
-CMAKE_CXX_COMPILER_WORKS:INTERNAL=1
-//ADVANCED property for variable: CMAKE_CXX_FLAGS
-CMAKE_CXX_FLAGS-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: CMAKE_CXX_FLAGS_DEBUG
-CMAKE_CXX_FLAGS_DEBUG-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: CMAKE_CXX_FLAGS_MINSIZEREL
-CMAKE_CXX_FLAGS_MINSIZEREL-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: CMAKE_CXX_FLAGS_RELEASE
-CMAKE_CXX_FLAGS_RELEASE-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: CMAKE_CXX_FLAGS_RELWITHDEBINFO
-CMAKE_CXX_FLAGS_RELWITHDEBINFO-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: CMAKE_C_COMPILER
-CMAKE_C_COMPILER-ADVANCED:INTERNAL=1
-CMAKE_C_COMPILER_WORKS:INTERNAL=1
-//ADVANCED property for variable: CMAKE_C_FLAGS
-CMAKE_C_FLAGS-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: CMAKE_C_FLAGS_DEBUG
-CMAKE_C_FLAGS_DEBUG-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: CMAKE_C_FLAGS_MINSIZEREL
-CMAKE_C_FLAGS_MINSIZEREL-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: CMAKE_C_FLAGS_RELEASE
-CMAKE_C_FLAGS_RELEASE-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: CMAKE_C_FLAGS_RELWITHDEBINFO
-CMAKE_C_FLAGS_RELWITHDEBINFO-ADVANCED:INTERNAL=1
-//Result of TRY_COMPILE
-CMAKE_DETERMINE_CXX_ABI_COMPILED:INTERNAL=TRUE
-//Result of TRY_COMPILE
-CMAKE_DETERMINE_C_ABI_COMPILED:INTERNAL=TRUE
-//Path to cache edit program executable.
-CMAKE_EDIT_COMMAND:INTERNAL=@EXTERNALS@/bin/ccmake
-//Executable file format
-CMAKE_EXECUTABLE_FORMAT:INTERNAL=Unknown
-//ADVANCED property for variable: CMAKE_EXE_LINKER_FLAGS
-CMAKE_EXE_LINKER_FLAGS-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: CMAKE_EXE_LINKER_FLAGS_DEBUG
-CMAKE_EXE_LINKER_FLAGS_DEBUG-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: CMAKE_EXE_LINKER_FLAGS_MINSIZEREL
-CMAKE_EXE_LINKER_FLAGS_MINSIZEREL-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: CMAKE_EXE_LINKER_FLAGS_RELEASE
-CMAKE_EXE_LINKER_FLAGS_RELEASE-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: CMAKE_EXE_LINKER_FLAGS_RELWITHDEBINFO
-CMAKE_EXE_LINKER_FLAGS_RELWITHDEBINFO-ADVANCED:INTERNAL=1
-//Name of generator.
-CMAKE_GENERATOR:INTERNAL=Unix Makefiles
-//Result of TRY_COMPILE
-CMAKE_HAS_ANSI_STRING_STREAM:INTERNAL=TRUE
-//Is X11 around.
-CMAKE_HAS_X:INTERNAL=1
-//Have function connect
-CMAKE_HAVE_CONNECT:INTERNAL=1
-//Have function gethostbyname
-CMAKE_HAVE_GETHOSTBYNAME:INTERNAL=1
-//Have include CMAKE_HAVE_LIMITS_H
-CMAKE_HAVE_LIMITS_H:INTERNAL=1
-//Have library pthreads
-CMAKE_HAVE_PTHREADS_CREATE:INTERNAL=
-//Have library pthread
-CMAKE_HAVE_PTHREAD_CREATE:INTERNAL=1
-//Have include CMAKE_HAVE_PTHREAD_H
-CMAKE_HAVE_PTHREAD_H:INTERNAL=1
-//Have function remove
-CMAKE_HAVE_REMOVE:INTERNAL=1
-//Have function shmat
-CMAKE_HAVE_SHMAT:INTERNAL=1
-//Have includes CMAKE_HAVE_SYS_PRCTL_H
-CMAKE_HAVE_SYS_PRCTL_H:INTERNAL=
-//Have include CMAKE_HAVE_UNISTD_H
-CMAKE_HAVE_UNISTD_H:INTERNAL=1
-//Start directory with the top level CMakeLists.txt file for this
-// project
-CMAKE_HOME_DIRECTORY:INTERNAL=@BUILD_DIR@/VTK
-//ADVANCED property for variable: CMAKE_HP_PTHREADS
-CMAKE_HP_PTHREADS-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: CMAKE_INSTALL_NAME_TOOL
-CMAKE_INSTALL_NAME_TOOL-ADVANCED:INTERNAL=1
-//MODIFIED property for variable: CMAKE_INSTALL_PREFIX
-CMAKE_INSTALL_PREFIX-MODIFIED:INTERNAL=ON
-//Have library ICE
-CMAKE_LIB_ICE_HAS_ICECONNECTIONNUMBER:INTERNAL=1
-//ADVANCED property for variable: CMAKE_LINKER
-CMAKE_LINKER-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: CMAKE_MAKE_PROGRAM
-CMAKE_MAKE_PROGRAM-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: CMAKE_MODULE_LINKER_FLAGS
-CMAKE_MODULE_LINKER_FLAGS-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: CMAKE_MODULE_LINKER_FLAGS_DEBUG
-CMAKE_MODULE_LINKER_FLAGS_DEBUG-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: CMAKE_MODULE_LINKER_FLAGS_MINSIZEREL
-CMAKE_MODULE_LINKER_FLAGS_MINSIZEREL-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: CMAKE_MODULE_LINKER_FLAGS_RELEASE
-CMAKE_MODULE_LINKER_FLAGS_RELEASE-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: CMAKE_MODULE_LINKER_FLAGS_RELWITHDEBINFO
-CMAKE_MODULE_LINKER_FLAGS_RELWITHDEBINFO-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: CMAKE_NM
-CMAKE_NM-ADVANCED:INTERNAL=1
-//Does the compiler support ansi for scope.
-CMAKE_NO_ANSI_FOR_SCOPE:INTERNAL=0
-//ADVANCED property for variable: CMAKE_NO_ANSI_STREAM_HEADERS
-CMAKE_NO_ANSI_STREAM_HEADERS-ADVANCED:INTERNAL=1
-//Does the compiler support headers like iostream.
-CMAKE_NO_ANSI_STREAM_HEADERS:INTERNAL=0
-//Does the compiler support sstream
-CMAKE_NO_ANSI_STRING_STREAM:INTERNAL=0
-//Does the compiler support std::.
-CMAKE_NO_STD_NAMESPACE:INTERNAL=0
-//number of local generators
-CMAKE_NUMBER_OF_LOCAL_GENERATORS:INTERNAL=83
-//ADVANCED property for variable: CMAKE_OBJCOPY
-CMAKE_OBJCOPY-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: CMAKE_OBJDUMP
-CMAKE_OBJDUMP-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: CMAKE_RANLIB
-CMAKE_RANLIB-ADVANCED:INTERNAL=1
-//Test Support for 64 bit file systems
-CMAKE_REQUIRE_LARGE_FILE_SUPPORT:INTERNAL=1
-//Path to CMake installation.
-CMAKE_ROOT:INTERNAL=@EXTERNALS@/share/cmake-2.8
-//ADVANCED property for variable: CMAKE_SHARED_LINKER_FLAGS
-CMAKE_SHARED_LINKER_FLAGS-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: CMAKE_SHARED_LINKER_FLAGS_DEBUG
-CMAKE_SHARED_LINKER_FLAGS_DEBUG-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: CMAKE_SHARED_LINKER_FLAGS_MINSIZEREL
-CMAKE_SHARED_LINKER_FLAGS_MINSIZEREL-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: CMAKE_SHARED_LINKER_FLAGS_RELEASE
-CMAKE_SHARED_LINKER_FLAGS_RELEASE-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: CMAKE_SHARED_LINKER_FLAGS_RELWITHDEBINFO
-CMAKE_SHARED_LINKER_FLAGS_RELWITHDEBINFO-ADVANCED:INTERNAL=1
-//Result of CHECK_TYPE_SIZE
-CMAKE_SIZEOF_CHAR:INTERNAL=1
-//Result of CHECK_TYPE_SIZE
-CMAKE_SIZEOF_DOUBLE:INTERNAL=8
-//Result of CHECK_TYPE_SIZE
-CMAKE_SIZEOF_FLOAT:INTERNAL=4
-//Result of CHECK_TYPE_SIZE
-CMAKE_SIZEOF_INT:INTERNAL=4
-//Result of CHECK_TYPE_SIZE
-CMAKE_SIZEOF_LONG:INTERNAL=8
-//Result of CHECK_TYPE_SIZE
-CMAKE_SIZEOF_SHORT:INTERNAL=2
-//Result of CHECK_TYPE_SIZE
-CMAKE_SIZEOF_UNSIGNED_SHORT:INTERNAL=2
-//Result of CHECK_TYPE_SIZE
-CMAKE_SIZEOF_VOID_P:INTERNAL=8
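The "Result of CHECK_TYPE_SIZE" / "Result of TRY_COMPILE" pairs above cache compiler-probe outcomes so that reconfiguring does not rerun each test compile. CMake performs the CMAKE_SIZEOF_* probes itself; the project-level equivalent looks roughly like this sketch (the variable names here are illustrative):

    # Sketch: the kind of probe behind the CHECK_TYPE_SIZE entries above.
    include(CheckTypeSize)
    check_type_size("void*" SIZEOF_VOID_P)  # caches the size in bytes, e.g. 8
    check_type_size("long" SIZEOF_LONG)     # plus a HAVE_SIZEOF_LONG TRY_COMPILE result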
-//ADVANCED property for variable: CMAKE_SKIP_RPATH
-CMAKE_SKIP_RPATH-ADVANCED:INTERNAL=1
-//Whether to build with rpath.
-CMAKE_SKIP_RPATH:INTERNAL=0
-//Result of TRY_COMPILE
-CMAKE_STD_NAMESPACE:INTERNAL=TRUE
-//ADVANCED property for variable: CMAKE_STRIP
-CMAKE_STRIP-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: CMAKE_THREAD_LIBS
-CMAKE_THREAD_LIBS-ADVANCED:INTERNAL=1
-//uname command
-CMAKE_UNAME:INTERNAL=/usr/bin/uname
-//ADVANCED property for variable: CMAKE_USE_PTHREADS
-CMAKE_USE_PTHREADS-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: CMAKE_USE_RELATIVE_PATHS
-CMAKE_USE_RELATIVE_PATHS-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: CMAKE_USE_SPROC
-CMAKE_USE_SPROC-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: CMAKE_USE_WIN32_THREADS
-CMAKE_USE_WIN32_THREADS-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: CMAKE_VERBOSE_MAKEFILE
-CMAKE_VERBOSE_MAKEFILE-ADVANCED:INTERNAL=1
-//Result of TEST_BIG_ENDIAN
-CMAKE_WORDS_BIGENDIAN:INTERNAL=0
-//ADVANCED property for variable: CMAKE_X_CFLAGS
-CMAKE_X_CFLAGS-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: CMAKE_X_LIBS
-CMAKE_X_LIBS-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: COVERAGE_COMMAND
-COVERAGE_COMMAND-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: CVSCOMMAND
-CVSCOMMAND-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: CVS_UPDATE_OPTIONS
-CVS_UPDATE_OPTIONS-ADVANCED:INTERNAL=1
-//CXX compiler accepts flag -no-cpp-precomp
-CXX_HAS_CPP_PRECOMP_FLAG:INTERNAL=TRUE
-//ADVANCED property for variable: DART_TESTING_TIMEOUT
-DART_TESTING_TIMEOUT-ADVANCED:INTERNAL=1
-//Single output directory for building all executables.
-EXECUTABLE_OUTPUT_PATH:INTERNAL=@BUILD_DIR@/VTK-build/bin
-//Have include malloc.h
-EX_HAVE_MALLOC_H:INTERNAL=
-//Details about finding PythonLibs
-FIND_PACKAGE_MESSAGE_DETAILS_PythonLibs:INTERNAL=[@PREFIX_PATH@/Python.framework/Versions/Current/lib/python@PYVER@/config/libpython@PYVER@.dylib][@PREFIX_PATH@/Python.framework/Headers]
-//Details about finding TCL
-FIND_PACKAGE_MESSAGE_DETAILS_TCL:INTERNAL=[@EXTERNALS@/lib/libtcl@TCLTK_VERSION@.dylib][@EXTERNALS@/include]
-//Details about finding TCLTK
-FIND_PACKAGE_MESSAGE_DETAILS_TCLTK:INTERNAL=[@EXTERNALS@/lib/libtcl@TCLTK_VERSION@.dylib][@EXTERNALS@/include][@EXTERNALS@/lib/libtcl@TCLTK_VERSION@.dylib][@EXTERNALS@/include]
-//Details about finding TK
-FIND_PACKAGE_MESSAGE_DETAILS_TK:INTERNAL=[@EXTERNALS@/lib/libtcl@TCLTK_VERSION@.dylib][@EXTERNALS@/include]
-//Details about finding Tclsh
-FIND_PACKAGE_MESSAGE_DETAILS_Tclsh:INTERNAL=[@EXTERNALS@/bin/tclsh@TCLTK_VERSION@]
-//Details about finding Threads
-FIND_PACKAGE_MESSAGE_DETAILS_Threads:INTERNAL=[TRUE]
-//Details about finding X11
-FIND_PACKAGE_MESSAGE_DETAILS_X11:INTERNAL=[/usr/X11R6/lib/libX11.dylib][/usr/include;/usr/include;/usr/include;/usr/include;/usr/include;/usr/include;/usr/include;/usr/include;/usr/include;/usr/include;/usr/include;/usr/include;/usr/include;/usr/include;/usr/include;/usr/include;/usr/include;/usr/include;/usr/include;/usr/include;/usr/include;/usr/include;/usr/include;/usr/include]
-//Have symbol alloca
-HAVE_ALLOCA:INTERNAL=1
-//Have include HAVE_ALLOCA_H
-HAVE_ALLOCA_H:INTERNAL=1
-//Have includes HAVE_ANSIDECL_H
-HAVE_ANSIDECL_H:INTERNAL=
-//Have include HAVE_ARPA_INET_H
-HAVE_ARPA_INET_H:INTERNAL=1
-//Have include HAVE_ARPA_NAMESER_H
-HAVE_ARPA_NAMESER_H:INTERNAL=1
-//Have include HAVE_ASSERT_H
-HAVE_ASSERT_H:INTERNAL=1
-//Result of TRY_COMPILE
-HAVE_CMAKE_REQUIRE_LARGE_FILE_SUPPORT:INTERNAL=TRUE
-//Result of TRY_COMPILE
-HAVE_CMAKE_SIZEOF_CHAR:INTERNAL=TRUE
-//Result of TRY_COMPILE
-HAVE_CMAKE_SIZEOF_DOUBLE:INTERNAL=TRUE
-//Result of TRY_COMPILE
-HAVE_CMAKE_SIZEOF_FLOAT:INTERNAL=TRUE
-//Result of TRY_COMPILE
-HAVE_CMAKE_SIZEOF_INT:INTERNAL=TRUE
-//Result of TRY_COMPILE
-HAVE_CMAKE_SIZEOF_LONG:INTERNAL=TRUE
-//Result of TRY_COMPILE
-HAVE_CMAKE_SIZEOF_SHORT:INTERNAL=TRUE
-//Result of TRY_COMPILE
-HAVE_CMAKE_SIZEOF_UNSIGNED_SHORT:INTERNAL=TRUE
-//Result of TRY_COMPILE
-HAVE_CMAKE_SIZEOF_VOID_P:INTERNAL=TRUE
-//Result of TRY_COMPILE
-HAVE_CMAKE_WORDS_BIGENDIAN:INTERNAL=TRUE
-//Have include HAVE_CTYPE_H
-HAVE_CTYPE_H:INTERNAL=1
-//Have include HAVE_DIRENT_H
-HAVE_DIRENT_H:INTERNAL=1
-//Have include HAVE_DLFCN_H
-HAVE_DLFCN_H:INTERNAL=1
-//Have library dl;-lpthread;m
-HAVE_DLOPEN:INTERNAL=1
-//Have includes HAVE_DL_H
-HAVE_DL_H:INTERNAL=
-//Have include HAVE_ERRNO_H
-HAVE_ERRNO_H:INTERNAL=1
-//Have include HAVE_FCNTL_H
-HAVE_FCNTL_H:INTERNAL=1
-//Have include fenv.h
-HAVE_FENV_H:INTERNAL=1
-//Have symbol finite
-HAVE_FINITE:INTERNAL=1
-//Have include HAVE_FLOAT_H
-HAVE_FLOAT_H:INTERNAL=1
-//Have function floor
-HAVE_FLOOR:INTERNAL=1
-//Have symbol fpclass
-HAVE_FPCLASS:INTERNAL=
-//Have symbol fprintf
-HAVE_FPRINTF:INTERNAL=1
-//Have symbol fp_class
-HAVE_FP_CLASS:INTERNAL=
-//Have includes HAVE_FP_CLASS_H
-HAVE_FP_CLASS_H:INTERNAL=
-//Have symbol ftime
-HAVE_FTIME:INTERNAL=1
-//NetCDF test 
-HAVE_FTRUNCATE:INTERNAL=1
-//Test HAVE_GCC_ERROR_RETURN_TYPE
-HAVE_GCC_ERROR_RETURN_TYPE:INTERNAL=1
-//Test HAVE_GCC_VISIBILITY
-HAVE_GCC_VISIBILITY:INTERNAL=1
-//Result of TRY_COMPILE
-HAVE_GETADDRINFO_COMPILED:INTERNAL=TRUE
-//Have function getopt
-HAVE_GETOPT:INTERNAL=1
-//Have symbol gettimeofday
-HAVE_GETTIMEOFDAY:INTERNAL=1
-//Have includes HAVE_IEEEFP_H
-HAVE_IEEEFP_H:INTERNAL=
-//Have include HAVE_INTTYPES_H
-HAVE_INTTYPES_H:INTERNAL=1
-//Have function isascii
-HAVE_ISASCII:INTERNAL=1
-//Result of TRY_COMPILE
-HAVE_KWSYS_SIZEOF_CHAR:INTERNAL=TRUE
-//Result of TRY_COMPILE
-HAVE_KWSYS_SIZEOF_SHORT:INTERNAL=TRUE
-//Result of TRY_COMPILE
-HAVE_KWSYS_SIZEOF___INT64:INTERNAL=FALSE
-//Have library dl;
-HAVE_LIBDL:INTERNAL=1
-//Have include HAVE_LIMITS_H
-HAVE_LIMITS_H:INTERNAL=1
-//Have symbol localtime
-HAVE_LOCALTIME:INTERNAL=1
-//Have includes HAVE_MALLOC_H
-HAVE_MALLOC_H:INTERNAL=
-//Have include HAVE_MATH_H
-HAVE_MATH_H:INTERNAL=1
-//Have function memmove
-HAVE_MEMMOVE:INTERNAL=1
-//Have include HAVE_MEMORY_H
-HAVE_MEMORY_H:INTERNAL=1
-//Have function memset
-HAVE_MEMSET:INTERNAL=1
-//Have function mmap
-HAVE_MMAP:INTERNAL=1
-//Have includes HAVE_NAN_H
-HAVE_NAN_H:INTERNAL=
-//Have includes HAVE_NDIR_H
-HAVE_NDIR_H:INTERNAL=
-//Have include HAVE_NETDB_H
-HAVE_NETDB_H:INTERNAL=1
-//Have include HAVE_NETINET_IN_H
-HAVE_NETINET_IN_H:INTERNAL=1
-//Have function pow
-HAVE_POW:INTERNAL=1
-//Have symbol printf
-HAVE_PRINTF:INTERNAL=1
-//Have include HAVE_PTHREAD_H
-HAVE_PTHREAD_H:INTERNAL=1
-//Have include HAVE_RESOLV_H
-HAVE_RESOLV_H:INTERNAL=1
-//Have library dld;dl
-HAVE_SHLLOAD:INTERNAL=
-//Have symbol signal
-HAVE_SIGNAL:INTERNAL=1
-//Have include HAVE_SIGNAL_H
-HAVE_SIGNAL_H:INTERNAL=1
-//Result of TRY_COMPILE
-HAVE_SIZEOF_DOUBLE:INTERNAL=TRUE
-//Result of TRY_COMPILE
-HAVE_SIZEOF_FLOAT:INTERNAL=TRUE
-//Result of TRY_COMPILE
-HAVE_SIZEOF_INT:INTERNAL=TRUE
-//Result of TRY_COMPILE
-HAVE_SIZEOF_LONG:INTERNAL=TRUE
-//Result of TRY_COMPILE
-HAVE_SIZEOF_OFF_T:INTERNAL=TRUE
-//Result of TRY_COMPILE
-HAVE_SIZEOF_PTRDIFF_T:INTERNAL=TRUE
-//Result of TRY_COMPILE
-HAVE_SIZEOF_SHORT:INTERNAL=TRUE
-//Result of TRY_COMPILE
-HAVE_SIZEOF_SIZE_T:INTERNAL=TRUE
-//Result of TRY_COMPILE
-HAVE_SIZEOF_SSIZE_T:INTERNAL=TRUE
-//Result of TRY_COMPILE
-HAVE_SIZEOF_UCHAR:INTERNAL=FALSE
-//Have symbol snprintf
-HAVE_SNPRINTF:INTERNAL=1
-//Result of TRY_COMPILE
-HAVE_SOCKLEN_T_COMPILED:INTERNAL=TRUE
-//Have symbol sprintf
-HAVE_SPRINTF:INTERNAL=1
-//Have function sqrt
-HAVE_SQRT:INTERNAL=1
-//Have symbol sscanf
-HAVE_SSCANF:INTERNAL=1
-//Have symbol stat
-HAVE_STAT:INTERNAL=1
-//Have include HAVE_STDARG_H
-HAVE_STDARG_H:INTERNAL=1
-//Have include stddef.h
-HAVE_STDDEF_H:INTERNAL=1
-//Have include stdint.h
-HAVE_STDINT_H:INTERNAL=1
-//Have include HAVE_STDIO_H
-HAVE_STDIO_H:INTERNAL=1
-//Have include HAVE_STDLIB_H
-HAVE_STDLIB_H:INTERNAL=1
-//Have function strcasecmp
-HAVE_STRCASECMP:INTERNAL=1
-//Have function strchr
-HAVE_STRCHR:INTERNAL=1
-//Have symbol strdup
-HAVE_STRDUP:INTERNAL=1
-//Have symbol strerror
-HAVE_STRERROR:INTERNAL=1
-//Have symbol strftime
-HAVE_STRFTIME:INTERNAL=1
-//Have include HAVE_STRINGS_H
-HAVE_STRINGS_H:INTERNAL=1
-//Have include HAVE_STRING_H
-HAVE_STRING_H:INTERNAL=1
-//Have symbol strndup
-HAVE_STRNDUP:INTERNAL=
-//Have function strrchr
-HAVE_STRRCHR:INTERNAL=1
-//Have function strstr
-HAVE_STRSTR:INTERNAL=1
-//Have function strtol
-HAVE_STRTOL:INTERNAL=1
-//Have function strtoul
-HAVE_STRTOUL:INTERNAL=
-//NetCDF test 
-HAVE_ST_BLKSIZE:INTERNAL=1
-//Result of TRY_COMPILE
-HAVE_SYS_DIR_H_COMPILED:INTERNAL=TRUE
-//Have include HAVE_SYS_MMAN_H
-HAVE_SYS_MMAN_H:INTERNAL=1
-//Result of TRY_COMPILE
-HAVE_SYS_NDIR_H_COMPILED:INTERNAL=FALSE
-//Have include HAVE_SYS_SELECT_H
-HAVE_SYS_SELECT_H:INTERNAL=1
-//Have include HAVE_SYS_SOCKET_H
-HAVE_SYS_SOCKET_H:INTERNAL=1
-//Have include HAVE_SYS_STAT_H
-HAVE_SYS_STAT_H:INTERNAL=1
-//Have include HAVE_SYS_TIMEB_H
-HAVE_SYS_TIMEB_H:INTERNAL=1
-//Have include HAVE_SYS_TIME_H
-HAVE_SYS_TIME_H:INTERNAL=1
-//Have include sys/types.h
-HAVE_SYS_TYPES_H:INTERNAL=1
-//Have include HAVE_TIME_H
-HAVE_TIME_H:INTERNAL=1
-//Have include HAVE_UNISTD_H
-HAVE_UNISTD_H:INTERNAL=1
-//Result of TRY_COMPILE
-HAVE_VA_COPY_COMPILED:INTERNAL=TRUE
-//Have symbol vfprintf
-HAVE_VFPRINTF:INTERNAL=1
-//Have symbol vsnprintf
-HAVE_VSNPRINTF:INTERNAL=1
-//Have symbol vsprintf
-HAVE_VSPRINTF:INTERNAL=1
-//Result of TRY_COMPILE
-HAVE_VTK_SIZEOF_LONG_LONG:INTERNAL=TRUE
-//Result of TRY_COMPILE
-HAVE_VTK_SIZEOF___INT64:INTERNAL=FALSE
-//Result of TRY_COMPILE
-HAVE_VTK_UINTPTR_T:INTERNAL=TRUE
-//Have includes HAVE_WINDOWS_H
-HAVE_WINDOWS_H:INTERNAL=
-//Have symbol _stat
-HAVE__STAT:INTERNAL=
-//Result of TRY_COMPILE
-HAVE___VA_COPY_COMPILED:INTERNAL=TRUE
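The long HAVE_* run above records header, symbol, and function probes; a value of 1 means the probe succeeded, and an empty value means it failed (compare HAVE_MALLOC_H with HAVE_MATH_H). A sketch of representative probes using the standard CMake check modules, with names matching entries above:

    # Sketch: representative probes behind the HAVE_* entries.
    include(CheckIncludeFile)
    include(CheckSymbolExists)
    include(CheckFunctionExists)
    check_include_file("unistd.h" HAVE_UNISTD_H)           # -> HAVE_UNISTD_H:INTERNAL=1
    check_symbol_exists(snprintf "stdio.h" HAVE_SNPRINTF)  # -> HAVE_SNPRINTF:INTERNAL=1
    check_function_exists(floor HAVE_FLOOR)                # -> HAVE_FLOOR:INTERNAL=1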
-//ADVANCED property for variable: HGCOMMAND
-HGCOMMAND-ADVANCED:INTERNAL=1
-//Result of TRY_RUN
-KWSYS_CHAR_IS_SIGNED:INTERNAL=0
-//Result of TRY_COMPILE
-KWSYS_CHAR_IS_SIGNED_COMPILED:INTERNAL=TRUE
-//Result of TRY_COMPILE
-KWSYS_CXX_HAS_ARGUMENT_DEPENDENT_LOOKUP_COMPILED:INTERNAL=TRUE
-//Result of TRY_COMPILE
-KWSYS_CXX_HAS_CSTDDEF_COMPILED:INTERNAL=TRUE
-//Result of TRY_COMPILE
-KWSYS_CXX_HAS_CSTDIO_COMPILED:INTERNAL=TRUE
-//Result of TRY_COMPILE
-KWSYS_CXX_HAS_FULL_SPECIALIZATION_COMPILED:INTERNAL=TRUE
-//Result of TRY_COMPILE
-KWSYS_CXX_HAS_MEMBER_TEMPLATES_COMPILED:INTERNAL=TRUE
-//Result of TRY_COMPILE
-KWSYS_CXX_HAS_NULL_TEMPLATE_ARGS_COMPILED:INTERNAL=FALSE
-//Result of TRY_COMPILE
-KWSYS_C_HAS_PTRDIFF_T_COMPILED:INTERNAL=TRUE
-//Result of TRY_COMPILE
-KWSYS_C_HAS_SSIZE_T_COMPILED:INTERNAL=TRUE
-//Result of TRY_COMPILE
-KWSYS_C_TYPE_MACROS_COMPILED:INTERNAL=TRUE
-//Result of TRY_COMPILE
-KWSYS_IOS_HAVE_BINARY_COMPILED:INTERNAL=TRUE
-//Result of TRY_COMPILE
-KWSYS_IOS_HAVE_STD_COMPILED:INTERNAL=TRUE
-//Result of TRY_COMPILE
-KWSYS_IOS_USE_ANSI_COMPILED:INTERNAL=TRUE
-//Result of TRY_COMPILE
-KWSYS_IOS_USE_SSTREAM_COMPILED:INTERNAL=TRUE
-//Result of TRY_RUN
-KWSYS_LFS_WORKS:INTERNAL=0
-//Result of TRY_COMPILE
-KWSYS_LFS_WORKS_COMPILED:INTERNAL=TRUE
-//Result of CHECK_TYPE_SIZE
-KWSYS_SIZEOF_CHAR:INTERNAL=1
-//Result of CHECK_TYPE_SIZE
-KWSYS_SIZEOF_SHORT:INTERNAL=2
-//Result of CHECK_TYPE_SIZE
-KWSYS_SIZEOF___INT64:INTERNAL=
-//Result of TRY_COMPILE
-KWSYS_STAT_HAS_ST_MTIM_COMPILED:INTERNAL=FALSE
-//Result of TRY_COMPILE
-KWSYS_STL_HAS_ALLOCATOR_MAX_SIZE_ARGUMENT_COMPILED:INTERNAL=FALSE
-//Result of TRY_COMPILE
-KWSYS_STL_HAS_ALLOCATOR_OBJECTS_COMPILED:INTERNAL=TRUE
-//Result of TRY_COMPILE
-KWSYS_STL_HAS_ALLOCATOR_REBIND_COMPILED:INTERNAL=TRUE
-//Result of TRY_COMPILE
-KWSYS_STL_HAS_ALLOCATOR_TEMPLATE_COMPILED:INTERNAL=TRUE
-//Result of TRY_COMPILE
-KWSYS_STL_HAS_ITERATOR_TRAITS_COMPILED:INTERNAL=TRUE
-//Result of TRY_COMPILE
-KWSYS_STL_HAVE_STD_COMPILED:INTERNAL=TRUE
-//Result of TRY_COMPILE
-KWSYS_STL_STRING_HAVE_NEQ_CHAR_COMPILED:INTERNAL=TRUE
-//Single output directory for building all libraries.
-LIBRARY_OUTPUT_PATH:INTERNAL=@BUILD_DIR@/VTK-build/bin
-//ADVANCED property for variable: MAKECOMMAND
-MAKECOMMAND-ADVANCED:INTERNAL=1
-//MODIFIED property for variable: MAKECOMMAND
-MAKECOMMAND-MODIFIED:INTERNAL=ON
-//Path to vtkMaterialLibraryMacro.h
-MATERIAL_LIBRARY_MATERIAL_MACRO_HEADER:INTERNAL=@BUILD_DIR@/VTK-build/Utilities/MaterialLibrary/vtkMaterialLibraryMacro.h
-//Path to vtkShaderCodeLibraryMacro.h
-MATERIAL_LIBRARY_SHADER_MACRO_HEADER:INTERNAL=@BUILD_DIR@/VTK-build/Utilities/MaterialLibrary/vtkShaderCodeLibraryMacro.h
-//ADVANCED property for variable: MEMORYCHECK_COMMAND
-MEMORYCHECK_COMMAND-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: MEMORYCHECK_SUPPRESSIONS_FILE
-MEMORYCHECK_SUPPRESSIONS_FILE-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: OPENGL_INCLUDE_DIR
-OPENGL_INCLUDE_DIR-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: OPENGL_gl_LIBRARY
-OPENGL_gl_LIBRARY-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: OPENGL_glu_LIBRARY
-OPENGL_glu_LIBRARY-ADVANCED:INTERNAL=1
-//Have symbol atanh
-PROJ_HAVE_ATANH:INTERNAL=1
-//Have symbol csin
-PROJ_HAVE_COMPLEX:INTERNAL=1
-//ADVANCED property for variable: PROJ_LIST_EXTERNAL
-PROJ_LIST_EXTERNAL-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: PROJ_USE_GSL
-PROJ_USE_GSL-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: PROJ_USE_PTHREADS
-PROJ_USE_PTHREADS-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: PYTHON_ENABLE_MODULE_vtkChartsPython
-PYTHON_ENABLE_MODULE_vtkChartsPython-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: PYTHON_ENABLE_MODULE_vtkCommonPython
-PYTHON_ENABLE_MODULE_vtkCommonPython-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: PYTHON_ENABLE_MODULE_vtkFilteringPython
-PYTHON_ENABLE_MODULE_vtkFilteringPython-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: PYTHON_ENABLE_MODULE_vtkGenericFilteringPython
-PYTHON_ENABLE_MODULE_vtkGenericFilteringPython-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: PYTHON_ENABLE_MODULE_vtkGeovisPython
-PYTHON_ENABLE_MODULE_vtkGeovisPython-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: PYTHON_ENABLE_MODULE_vtkGraphicsPython
-PYTHON_ENABLE_MODULE_vtkGraphicsPython-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: PYTHON_ENABLE_MODULE_vtkHybridPython
-PYTHON_ENABLE_MODULE_vtkHybridPython-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: PYTHON_ENABLE_MODULE_vtkIOPython
-PYTHON_ENABLE_MODULE_vtkIOPython-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: PYTHON_ENABLE_MODULE_vtkImagingPython
-PYTHON_ENABLE_MODULE_vtkImagingPython-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: PYTHON_ENABLE_MODULE_vtkInfovisPython
-PYTHON_ENABLE_MODULE_vtkInfovisPython-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: PYTHON_ENABLE_MODULE_vtkRenderingPython
-PYTHON_ENABLE_MODULE_vtkRenderingPython-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: PYTHON_ENABLE_MODULE_vtkViewsPython
-PYTHON_ENABLE_MODULE_vtkViewsPython-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: PYTHON_ENABLE_MODULE_vtkVolumeRenderingPython
-PYTHON_ENABLE_MODULE_vtkVolumeRenderingPython-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: PYTHON_ENABLE_MODULE_vtkWidgetsPython
-PYTHON_ENABLE_MODULE_vtkWidgetsPython-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: PYTHON_EXECUTABLE
-PYTHON_EXECUTABLE-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: PYTHON_EXTRA_LIBS
-PYTHON_EXTRA_LIBS-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: PYTHON_INCLUDE_DIR
-PYTHON_INCLUDE_DIR-ADVANCED:INTERNAL=1
-//Path to where Python.h is found (deprecated)
-PYTHON_INCLUDE_PATH:INTERNAL=@PREFIX_PATH@/Python.framework/Headers
-//ADVANCED property for variable: PYTHON_LIBRARY
-PYTHON_LIBRARY-ADVANCED:INTERNAL=1
-//MODIFIED property for variable: PYTHON_LIBRARY
-PYTHON_LIBRARY-MODIFIED:INTERNAL=ON
-//ADVANCED property for variable: PYTHON_MODULE_vtkChartsPython_BUILD_SHARED
-PYTHON_MODULE_vtkChartsPython_BUILD_SHARED-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: PYTHON_MODULE_vtkCommonPython_BUILD_SHARED
-PYTHON_MODULE_vtkCommonPython_BUILD_SHARED-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: PYTHON_MODULE_vtkFilteringPython_BUILD_SHARED
-PYTHON_MODULE_vtkFilteringPython_BUILD_SHARED-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: PYTHON_MODULE_vtkGenericFilteringPython_BUILD_SHARED
-PYTHON_MODULE_vtkGenericFilteringPython_BUILD_SHARED-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: PYTHON_MODULE_vtkGeovisPython_BUILD_SHARED
-PYTHON_MODULE_vtkGeovisPython_BUILD_SHARED-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: PYTHON_MODULE_vtkGraphicsPython_BUILD_SHARED
-PYTHON_MODULE_vtkGraphicsPython_BUILD_SHARED-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: PYTHON_MODULE_vtkHybridPython_BUILD_SHARED
-PYTHON_MODULE_vtkHybridPython_BUILD_SHARED-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: PYTHON_MODULE_vtkIOPython_BUILD_SHARED
-PYTHON_MODULE_vtkIOPython_BUILD_SHARED-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: PYTHON_MODULE_vtkImagingPython_BUILD_SHARED
-PYTHON_MODULE_vtkImagingPython_BUILD_SHARED-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: PYTHON_MODULE_vtkInfovisPython_BUILD_SHARED
-PYTHON_MODULE_vtkInfovisPython_BUILD_SHARED-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: PYTHON_MODULE_vtkRenderingPython_BUILD_SHARED
-PYTHON_MODULE_vtkRenderingPython_BUILD_SHARED-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: PYTHON_MODULE_vtkViewsPython_BUILD_SHARED
-PYTHON_MODULE_vtkViewsPython_BUILD_SHARED-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: PYTHON_MODULE_vtkVolumeRenderingPython_BUILD_SHARED
-PYTHON_MODULE_vtkVolumeRenderingPython_BUILD_SHARED-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: PYTHON_MODULE_vtkWidgetsPython_BUILD_SHARED
-PYTHON_MODULE_vtkWidgetsPython_BUILD_SHARED-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: PYTHON_UTIL_LIBRARY
-PYTHON_UTIL_LIBRARY-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: SCPCOMMAND
-SCPCOMMAND-ADVANCED:INTERNAL=1
-//Result of TRY_COMPILE
-SHARED_LIBRARY_PATH_INFO_COMPILED:INTERNAL=TRUE
-//Result of TRY_RUN
-SHARED_LIBRARY_PATH_TYPE:INTERNAL=0
-//runtime library path variable name.
-SHARED_LIBRARY_PATH_VAR_NAME:INTERNAL=DYLD_LIBRARY_PATH
-//ADVANCED property for variable: SITE
-SITE-ADVANCED:INTERNAL=1
-//Result of CHECK_TYPE_SIZE
-SIZEOF_DOUBLE:INTERNAL=8
-//Result of CHECK_TYPE_SIZE
-SIZEOF_FLOAT:INTERNAL=4
-//Result of CHECK_TYPE_SIZE
-SIZEOF_INT:INTERNAL=4
-//Result of CHECK_TYPE_SIZE
-SIZEOF_LONG:INTERNAL=8
-//Result of CHECK_TYPE_SIZE
-SIZEOF_OFF_T:INTERNAL=8
-//Result of CHECK_TYPE_SIZE
-SIZEOF_PTRDIFF_T:INTERNAL=8
-//Result of CHECK_TYPE_SIZE
-SIZEOF_SHORT:INTERNAL=2
-//Result of CHECK_TYPE_SIZE
-SIZEOF_SIZE_T:INTERNAL=8
-//Result of CHECK_TYPE_SIZE
-SIZEOF_SSIZE_T:INTERNAL=8
-//Result of CHECK_TYPE_SIZE
-SIZEOF_UCHAR:INTERNAL=
-//ADVANCED property for variable: SLURM_SBATCH_COMMAND
-SLURM_SBATCH_COMMAND-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: SLURM_SRUN_COMMAND
-SLURM_SRUN_COMMAND-ADVANCED:INTERNAL=1
-//Have include STDC_HEADERS
-STDC_HEADERS:INTERNAL=1
-//Result of TRY_COMPILE
-SUPPORT_IP6_COMPILED:INTERNAL=TRUE
-//ADVANCED property for variable: SVNCOMMAND
-SVNCOMMAND-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: TCL_INCLUDE_PATH
-TCL_INCLUDE_PATH-ADVANCED:INTERNAL=1
-//MODIFIED property for variable: TCL_INCLUDE_PATH
-TCL_INCLUDE_PATH-MODIFIED:INTERNAL=ON
-//ADVANCED property for variable: TCL_LIBRARY
-TCL_LIBRARY-ADVANCED:INTERNAL=1
-//MODIFIED property for variable: TCL_LIBRARY
-TCL_LIBRARY-MODIFIED:INTERNAL=ON
-//ADVANCED property for variable: TCL_TCLSH
-TCL_TCLSH-ADVANCED:INTERNAL=1
-//MODIFIED property for variable: TCL_TCLSH
-TCL_TCLSH-MODIFIED:INTERNAL=ON
-//ADVANCED property for variable: TK_INCLUDE_PATH
-TK_INCLUDE_PATH-ADVANCED:INTERNAL=1
-//MODIFIED property for variable: TK_INCLUDE_PATH
-TK_INCLUDE_PATH-MODIFIED:INTERNAL=ON
-//ADVANCED property for variable: TK_INTERNAL_PATH
-TK_INTERNAL_PATH-ADVANCED:INTERNAL=1
-//MODIFIED property for variable: TK_INTERNAL_PATH
-TK_INTERNAL_PATH-MODIFIED:INTERNAL=ON
-//ADVANCED property for variable: TK_LIBRARY
-TK_LIBRARY-ADVANCED:INTERNAL=1
-//MODIFIED property for variable: TK_LIBRARY
-TK_LIBRARY-MODIFIED:INTERNAL=ON
-//This value is not used by VTK.
-TK_WISH:INTERNAL=/usr/bin/wish
-//ADVANCED property for variable: VERDICT_BUILD_DOC
-VERDICT_BUILD_DOC-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: VERDICT_ENABLE_TESTING
-VERDICT_ENABLE_TESTING-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: VERDICT_MANGLE
-VERDICT_MANGLE-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: VERDICT_MANGLE_PREFIX
-VERDICT_MANGLE_PREFIX-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: VERDICT_USE_FLOAT
-VERDICT_USE_FLOAT-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: VLI_LIBRARY_FOR_VP1000
-VLI_LIBRARY_FOR_VP1000-ADVANCED:INTERNAL=1
-//Result of TRY_COMPILE
-VTK_ANSI_STREAM_EOF_COMPILED:INTERNAL=TRUE
-//Result of TRY_RUN
-VTK_ANSI_STREAM_EOF_RESULT:INTERNAL=0
-//Support for C++ type bool
-VTK_COMPILER_HAS_BOOL:INTERNAL=1
-//Support for full template specialization syntax
-VTK_COMPILER_HAS_FULL_SPECIALIZATION:INTERNAL=1
-//ADVANCED property for variable: VTK_DEBUG_LEAKS
-VTK_DEBUG_LEAKS-ADVANCED:INTERNAL=1
-//The directory in which code for Shaders is provided.
-VTK_DEFAULT_SHADERS_DIR:INTERNAL=@BUILD_DIR@/VTK-build/Utilities/MaterialLibrary/Repository
-//String encoder.
-VTK_ENCODESTRING_EXE:INTERNAL=@BUILD_DIR@/VTK-build/bin/vtkEncodeString
-//Support for C++ explicit templates
-VTK_EXPLICIT_TEMPLATES:INTERNAL=1
-//ADVANCED property for variable: VTK_GLEXT_FILE
-VTK_GLEXT_FILE-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: VTK_GLXEXT_FILE
-VTK_GLXEXT_FILE-ADVANCED:INTERNAL=1
-//Have symbol feenableexcept
-VTK_HAS_FEENABLEEXCEPT:INTERNAL=
-//Have symbol isinf
-VTK_HAS_ISINF:INTERNAL=
-//Have symbol isnan
-VTK_HAS_ISNAN:INTERNAL=
-//Have symbol _isnan
-VTK_HAS__ISNAN:INTERNAL=
-//Have include iosfwd
-VTK_HAVE_ANSI_STREAMS:INTERNAL=1
-//Support for getsockname with socklen_t
-VTK_HAVE_GETSOCKNAME_WITH_SOCKLEN_T:INTERNAL=1
-//Have library socket
-VTK_HAVE_LIBSOCKET:INTERNAL=
-//Have include iostream.h
-VTK_HAVE_OLD_STREAMS:INTERNAL=1
-//Have include strstream.h
-VTK_HAVE_OLD_STRSTREAM_H:INTERNAL=
-//Have include strstrea.h
-VTK_HAVE_OLD_STRSTREA_H:INTERNAL=
-//Have symbol SO_REUSEADDR
-VTK_HAVE_SO_REUSEADDR:INTERNAL=1
-//Whether istream supports long long
-VTK_ISTREAM_SUPPORTS_LONG_LONG:INTERNAL=1
-//ADVANCED property for variable: VTK_LEGACY_REMOVE
-VTK_LEGACY_REMOVE-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: VTK_LEGACY_SILENT
-VTK_LEGACY_SILENT-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: VTK_MATERIALS_DIRS
-VTK_MATERIALS_DIRS-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: VTK_NO_PYTHON_THREADS
-VTK_NO_PYTHON_THREADS-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: VTK_OPENGL_HAS_OSMESA
-VTK_OPENGL_HAS_OSMESA-ADVANCED:INTERNAL=1
-//Whether ostream supports long long
-VTK_OSTREAM_SUPPORTS_LONG_LONG:INTERNAL=1
-//OpenGL extensions parser.
-VTK_PARSEOGLEXT_EXE:INTERNAL=@BUILD_DIR@/VTK-build/bin/vtkParseOGLExt
-//Install directory for Python .py and .pyc files
-VTK_PYTHON_MODULE_INSTALL_DIR:INTERNAL=${CMAKE_INSTALL_PREFIX}/lib/python@PYVER@/site-packages
-//Install directory for Python binary modules
-VTK_PYTHON_MODULE_PLATFORM_INSTALL_DIR:INTERNAL=${CMAKE_INSTALL_PREFIX}/lib/python@PYVER@/site-packages
-//ADVANCED property for variable: VTK_PYTHON_SETUP_ARGS
-VTK_PYTHON_SETUP_ARGS-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: VTK_REQUIRED_OBJCXX_FLAGS
-VTK_REQUIRED_OBJCXX_FLAGS-ADVANCED:INTERNAL=1
-//Result of CHECK_TYPE_SIZE
-VTK_SIZEOF_LONG_LONG:INTERNAL=8
-//Result of CHECK_TYPE_SIZE
-VTK_SIZEOF___INT64:INTERNAL=
-//Very few users should worry about this option. If VTK is built
-// against a static Tcl/Tk lib (see VTK_TCL_TK_STATIC) or a shared
-// Tcl/Tk bundled inside a project with no library support files
-// (ex: ParaViewComplete), this variable should be set to ON and
-// both VTK_TCL_SUPPORT_LIBRARY_PATH and VTK_TK_SUPPORT_LIBRARY_PATH
-// should point to the directories that hold those files (typically,
-// lib/tcl8.4 and lib/tk8.4 for a typical Tcl/Tk installation,
-// or tcl8.4.5/library and tk8.4.5/library for a Tcl/Tk source
-// repository). Once this variable is set to ON, support files
-// will automatically be copied to the build directory and the
-// executables will try to use that location to initialize Tcl/Tk.
-VTK_TCL_TK_COPY_SUPPORT_LIBRARY:INTERNAL=
-//ADVANCED property for variable: VTK_TESTING_LOCALE
-VTK_TESTING_LOCALE-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: VTK_TESTING_USE_FPE
-VTK_TESTING_USE_FPE-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: VTK_TESTING_USE_LOCALE
-VTK_TESTING_USE_LOCALE-ADVANCED:INTERNAL=1
-//Whether char is signed.
-VTK_TYPE_CHAR_IS_SIGNED:INTERNAL=1
-//Result of TRY_COMPILE
-VTK_TYPE_CHAR_IS_SIGNED_COMPILED:INTERNAL=TRUE
-//Result of CHECK_TYPE_SIZE
-VTK_UINTPTR_T:INTERNAL=8
-//ADVANCED property for variable: VTK_USE_64BIT_IDS
-VTK_USE_64BIT_IDS-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: VTK_USE_BOOST
-VTK_USE_BOOST-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: VTK_USE_CG_SHADERS
-VTK_USE_CG_SHADERS-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: VTK_USE_DISPLAY
-VTK_USE_DISPLAY-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: VTK_USE_FFMPEG_ENCODER
-VTK_USE_FFMPEG_ENCODER-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: VTK_USE_GCC_VISIBILITY
-VTK_USE_GCC_VISIBILITY-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: VTK_USE_GL2PS
-VTK_USE_GL2PS-ADVANCED:INTERNAL=1
-//MODIFIED property for variable: VTK_USE_GL2PS
-VTK_USE_GL2PS-MODIFIED:INTERNAL=ON
-//ADVANCED property for variable: VTK_USE_GLSL_SHADERS
-VTK_USE_GLSL_SHADERS-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: VTK_USE_GNU_R
-VTK_USE_GNU_R-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: VTK_USE_GUISUPPORT
-VTK_USE_GUISUPPORT-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: VTK_USE_MANGLED_MESA
-VTK_USE_MANGLED_MESA-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: VTK_USE_MATLAB_MEX
-VTK_USE_MATLAB_MEX-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: VTK_USE_METAIO
-VTK_USE_METAIO-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: VTK_USE_MPEG2_ENCODER
-VTK_USE_MPEG2_ENCODER-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: VTK_USE_MYSQL
-VTK_USE_MYSQL-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: VTK_USE_ODBC
-VTK_USE_ODBC-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: VTK_USE_OGGTHEORA_ENCODER
-VTK_USE_OGGTHEORA_ENCODER-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: VTK_USE_OPENFOAM
-VTK_USE_OPENFOAM-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: VTK_USE_POSTGRES
-VTK_USE_POSTGRES-ADVANCED:INTERNAL=1
-//Build QVTK widget and plugin for Qt
-VTK_USE_QVTK:INTERNAL=OFF
-//ADVANCED property for variable: VTK_USE_RPATH
-VTK_USE_RPATH-ADVANCED:INTERNAL=1
-//MODIFIED property for variable: VTK_USE_RPATH
-VTK_USE_RPATH-MODIFIED:INTERNAL=ON
-//ADVANCED property for variable: VTK_USE_SYSTEM_EXPAT
-VTK_USE_SYSTEM_EXPAT-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: VTK_USE_SYSTEM_FREETYPE
-VTK_USE_SYSTEM_FREETYPE-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: VTK_USE_SYSTEM_GL2PS
-VTK_USE_SYSTEM_GL2PS-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: VTK_USE_SYSTEM_JPEG
-VTK_USE_SYSTEM_JPEG-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: VTK_USE_SYSTEM_LIBPROJ4
-VTK_USE_SYSTEM_LIBPROJ4-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: VTK_USE_SYSTEM_LIBXML2
-VTK_USE_SYSTEM_LIBXML2-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: VTK_USE_SYSTEM_PNG
-VTK_USE_SYSTEM_PNG-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: VTK_USE_SYSTEM_TIFF
-VTK_USE_SYSTEM_TIFF-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: VTK_USE_SYSTEM_ZLIB
-VTK_USE_SYSTEM_ZLIB-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: VTK_USE_TDX
-VTK_USE_TDX-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: VTK_USE_TK
-VTK_USE_TK-ADVANCED:INTERNAL=1
-//MODIFIED property for variable: VTK_USE_TK
-VTK_USE_TK-MODIFIED:INTERNAL=ON
-//ADVANCED property for variable: VTK_USE_VOLUMEPRO_1000
-VTK_USE_VOLUMEPRO_1000-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: VTK_WGLEXT_FILE
-VTK_WGLEXT_FILE-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: VTK_WRAP_HINTS
-VTK_WRAP_HINTS-ADVANCED:INTERNAL=1
-//MODIFIED property for variable: VTK_WRAP_PYTHON
-VTK_WRAP_PYTHON-MODIFIED:INTERNAL=ON
-//Location of program to do Python wrapping
-VTK_WRAP_PYTHON_EXE:INTERNAL=@BUILD_DIR@/VTK-build/bin/vtkWrapPython
-//Location of program to do Python wrapping
-VTK_WRAP_PYTHON_INIT_EXE:INTERNAL=@BUILD_DIR@/VTK-build/bin/vtkWrapPythonInit
-//ADVANCED property for variable: X11_ICE_INCLUDE_PATH
-X11_ICE_INCLUDE_PATH-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: X11_ICE_LIB
-X11_ICE_LIB-ADVANCED:INTERNAL=1
-//Have library /usr/X11R6/lib/libX11.dylib;/usr/X11R6/lib/libXext.dylib
-X11_LIB_X11_SOLO:INTERNAL=1
-//ADVANCED property for variable: X11_SM_LIB
-X11_SM_LIB-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: X11_X11_INCLUDE_PATH
-X11_X11_INCLUDE_PATH-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: X11_X11_LIB
-X11_X11_LIB-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: X11_XShm_INCLUDE_PATH
-X11_XShm_INCLUDE_PATH-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: X11_XTest_INCLUDE_PATH
-X11_XTest_INCLUDE_PATH-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: X11_XTest_LIB
-X11_XTest_LIB-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: X11_Xaccessrules_INCLUDE_PATH
-X11_Xaccessrules_INCLUDE_PATH-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: X11_Xaccessstr_INCLUDE_PATH
-X11_Xaccessstr_INCLUDE_PATH-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: X11_Xau_INCLUDE_PATH
-X11_Xau_INCLUDE_PATH-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: X11_Xau_LIB
-X11_Xau_LIB-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: X11_Xcomposite_INCLUDE_PATH
-X11_Xcomposite_INCLUDE_PATH-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: X11_Xcomposite_LIB
-X11_Xcomposite_LIB-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: X11_Xcursor_INCLUDE_PATH
-X11_Xcursor_INCLUDE_PATH-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: X11_Xcursor_LIB
-X11_Xcursor_LIB-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: X11_Xdamage_INCLUDE_PATH
-X11_Xdamage_INCLUDE_PATH-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: X11_Xdamage_LIB
-X11_Xdamage_LIB-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: X11_Xdmcp_INCLUDE_PATH
-X11_Xdmcp_INCLUDE_PATH-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: X11_Xdmcp_LIB
-X11_Xdmcp_LIB-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: X11_Xext_LIB
-X11_Xext_LIB-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: X11_Xfixes_INCLUDE_PATH
-X11_Xfixes_INCLUDE_PATH-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: X11_Xfixes_LIB
-X11_Xfixes_LIB-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: X11_Xft_INCLUDE_PATH
-X11_Xft_INCLUDE_PATH-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: X11_Xft_LIB
-X11_Xft_LIB-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: X11_Xinerama_INCLUDE_PATH
-X11_Xinerama_INCLUDE_PATH-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: X11_Xinerama_LIB
-X11_Xinerama_LIB-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: X11_Xinput_INCLUDE_PATH
-X11_Xinput_INCLUDE_PATH-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: X11_Xinput_LIB
-X11_Xinput_LIB-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: X11_Xkb_INCLUDE_PATH
-X11_Xkb_INCLUDE_PATH-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: X11_Xkblib_INCLUDE_PATH
-X11_Xkblib_INCLUDE_PATH-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: X11_Xlib_INCLUDE_PATH
-X11_Xlib_INCLUDE_PATH-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: X11_Xpm_INCLUDE_PATH
-X11_Xpm_INCLUDE_PATH-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: X11_Xpm_LIB
-X11_Xpm_LIB-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: X11_Xrandr_INCLUDE_PATH
-X11_Xrandr_INCLUDE_PATH-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: X11_Xrandr_LIB
-X11_Xrandr_LIB-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: X11_Xrender_INCLUDE_PATH
-X11_Xrender_INCLUDE_PATH-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: X11_Xrender_LIB
-X11_Xrender_LIB-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: X11_Xscreensaver_INCLUDE_PATH
-X11_Xscreensaver_INCLUDE_PATH-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: X11_Xscreensaver_LIB
-X11_Xscreensaver_LIB-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: X11_Xshape_INCLUDE_PATH
-X11_Xshape_INCLUDE_PATH-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: X11_Xt_INCLUDE_PATH
-X11_Xt_INCLUDE_PATH-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: X11_Xt_LIB
-X11_Xt_LIB-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: X11_Xutil_INCLUDE_PATH
-X11_Xutil_INCLUDE_PATH-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: X11_Xv_INCLUDE_PATH
-X11_Xv_INCLUDE_PATH-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: X11_Xv_LIB
-X11_Xv_LIB-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: X11_Xxf86misc_LIB
-X11_Xxf86misc_LIB-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: X11_dpms_INCLUDE_PATH
-X11_dpms_INCLUDE_PATH-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: X11_xf86misc_INCLUDE_PATH
-X11_xf86misc_INCLUDE_PATH-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: X11_xf86vmode_INCLUDE_PATH
-X11_xf86vmode_INCLUDE_PATH-ADVANCED:INTERNAL=1
-//Already complained about update type.
-__CTEST_UPDATE_TYPE_COMPLAINED:INTERNAL=1
-//Have function _isinf
-float.h:INTERNAL=
-
diff --git a/exsrc/src/CMakeCache.txt.mac.in b/exsrc/src/CMakeCache.txt.mac.in
deleted file mode 100644
index 15a056b2839e0f6aae09bbbe48fba73c36ee2489..0000000000000000000000000000000000000000
--- a/exsrc/src/CMakeCache.txt.mac.in
+++ /dev/null
@@ -1,1965 +0,0 @@
-# This is the CMakeCache file.
-# For build in directory: @BUILD_DIR@/VTK-build
-# It was generated by CMake: cmake
-# You can edit this file to change values found and used by cmake.
-# If you do not want to change any of the values, simply exit the editor.
-# If you do want to change a value, simply edit, save, and exit the editor.
-# The syntax for the file is as follows:
-# KEY:TYPE=VALUE
-# KEY is the name of a variable in the cache.
-# TYPE is a hint to GUIs for the type of VALUE. DO NOT EDIT TYPE!
-# VALUE is the current value for the KEY.
-
-########################
-# EXTERNAL cache entries
-########################
-
-//Build the documentation (Doxygen).
-BUILD_DOCUMENTATION:BOOL=OFF
-
-//Build VTK examples.
-BUILD_EXAMPLES:BOOL=ON
-
-//Build Verdict with shared libraries.
-BUILD_SHARED_LIBS:BOOL=ON
-
-//Build the testing tree.
-BUILD_TESTING:BOOL=OFF
-
-//Path to a program.
-BZRCOMMAND:FILEPATH=BZRCOMMAND-NOTFOUND
-
-//Path to a program.
-CMAKE_AR:FILEPATH=/usr/bin/ar
-
-//For backwards compatibility, what version of CMake commands and
-// syntax should this version of CMake try to support.
-CMAKE_BACKWARDS_COMPATIBILITY:STRING=2.4
-
-//Choose the type of build, options are: None(CMAKE_CXX_FLAGS or
-// CMAKE_C_FLAGS used) Debug Release RelWithDebInfo MinSizeRel.
-CMAKE_BUILD_TYPE:STRING=
-
-//Enable/Disable color output during build.
-CMAKE_COLOR_MAKEFILE:BOOL=ON
-
-//CXX compiler.
-CMAKE_CXX_COMPILER:FILEPATH=/usr/bin/c++
-
-//Flags used by the compiler during all build types.
-CMAKE_CXX_FLAGS:STRING=
-
-//Flags used by the compiler during debug builds.
-CMAKE_CXX_FLAGS_DEBUG:STRING=-g
-
-//Flags used by the compiler during release minsize builds.
-CMAKE_CXX_FLAGS_MINSIZEREL:STRING=-Os -DNDEBUG
-
-//Flags used by the compiler during release builds (/MD /Ob1 /Oi
-// /Ot /Oy /Gs will produce slightly less optimized but smaller
-// files).
-CMAKE_CXX_FLAGS_RELEASE:STRING=-O3 -DNDEBUG
-
-//Flags used by the compiler during Release with Debug Info builds.
-CMAKE_CXX_FLAGS_RELWITHDEBINFO:STRING=-O2 -g
-
-//C compiler.
-CMAKE_C_COMPILER:FILEPATH=/usr/bin/gcc
-
-//Flags used by the compiler during all build types.
-CMAKE_C_FLAGS:STRING=
-
-//Flags used by the compiler during debug builds.
-CMAKE_C_FLAGS_DEBUG:STRING=-g
-
-//Flags used by the compiler during release minsize builds.
-CMAKE_C_FLAGS_MINSIZEREL:STRING=-Os -DNDEBUG
-
-//Flags used by the compiler during release builds (/MD /Ob1 /Oi
-// /Ot /Oy /Gs will produce slightly less optimized but smaller
-// files).
-CMAKE_C_FLAGS_RELEASE:STRING=-O3 -DNDEBUG
-
-//Flags used by the compiler during Release with Debug Info builds.
-CMAKE_C_FLAGS_RELWITHDEBINFO:STRING=-O2 -g
-
-//Flags used by the linker.
-CMAKE_EXE_LINKER_FLAGS:STRING=
-
-//Flags used by the linker during debug builds.
-CMAKE_EXE_LINKER_FLAGS_DEBUG:STRING=
-
-//Flags used by the linker during release minsize builds.
-CMAKE_EXE_LINKER_FLAGS_MINSIZEREL:STRING=
-
-//Flags used by the linker during release builds.
-CMAKE_EXE_LINKER_FLAGS_RELEASE:STRING=
-
-//Flags used by the linker during Release with Debug Info builds.
-CMAKE_EXE_LINKER_FLAGS_RELWITHDEBINFO:STRING=
-
-//Use HP pthreads.
-CMAKE_HP_PTHREADS:BOOL=
-
-//Path to a program.
-CMAKE_INSTALL_NAME_TOOL:FILEPATH=/usr/bin/install_name_tool
-
-//Install path prefix, prepended onto install directories.
-CMAKE_INSTALL_PREFIX:PATH=@PREFIX_PATH@
-
-//Path to a program.
-CMAKE_LINKER:FILEPATH=/usr/bin/ld
-
-//Path to a program.
-CMAKE_MAKE_PROGRAM:FILEPATH=/usr/bin/make
-
-//Flags used by the linker during the creation of modules.
-CMAKE_MODULE_LINKER_FLAGS:STRING=
-
-//Flags used by the linker during debug builds.
-CMAKE_MODULE_LINKER_FLAGS_DEBUG:STRING=
-
-//Flags used by the linker during release minsize builds.
-CMAKE_MODULE_LINKER_FLAGS_MINSIZEREL:STRING=
-
-//Flags used by the linker during release builds.
-CMAKE_MODULE_LINKER_FLAGS_RELEASE:STRING=
-
-//Flags used by the linker during Release with Debug Info builds.
-CMAKE_MODULE_LINKER_FLAGS_RELWITHDEBINFO:STRING=
-
-//Path to a program.
-CMAKE_NM:FILEPATH=/usr/bin/nm
-
-//Path to a program.
-CMAKE_OBJCOPY:FILEPATH=CMAKE_OBJCOPY-NOTFOUND
-
-//Path to a program.
-CMAKE_OBJDUMP:FILEPATH=CMAKE_OBJDUMP-NOTFOUND
-
-//Build architectures for OSX
-CMAKE_OSX_ARCHITECTURES:STRING=
-
-//Minimum OS X version to target for deployment (at runtime); newer
-// APIs weak linked. Set to empty string for default value.
-CMAKE_OSX_DEPLOYMENT_TARGET:STRING=10.5
-
-//The product will be built against the headers and libraries located
-// inside the indicated SDK.
-CMAKE_OSX_SYSROOT:PATH=/Developer/SDKs/MacOSX10.5.sdk
-
-//Value Computed by CMake
-CMAKE_PROJECT_NAME:STATIC=VTK
-
-//Path to a program.
-CMAKE_RANLIB:FILEPATH=/usr/bin/ranlib
-
-//Flags used by the linker during the creation of dll's.
-CMAKE_SHARED_LINKER_FLAGS:STRING=
-
-//Flags used by the linker during debug builds.
-CMAKE_SHARED_LINKER_FLAGS_DEBUG:STRING=
-
-//Flags used by the linker during release minsize builds.
-CMAKE_SHARED_LINKER_FLAGS_MINSIZEREL:STRING=
-
-//Flags used by the linker during release builds.
-CMAKE_SHARED_LINKER_FLAGS_RELEASE:STRING=
-
-//Flags used by the linker during Release with Debug Info builds.
-CMAKE_SHARED_LINKER_FLAGS_RELWITHDEBINFO:STRING=
-
-//Path to a program.
-CMAKE_STRIP:FILEPATH=/usr/bin/strip
-
-//Thread library used.
-CMAKE_THREAD_LIBS:STRING=-lpthread
-
-//Use the pthreads library.
-CMAKE_USE_PTHREADS:BOOL=1
-
-//If true, cmake will use relative paths in makefiles and projects.
-CMAKE_USE_RELATIVE_PATHS:BOOL=OFF
-
-//Use sproc libs.
-CMAKE_USE_SPROC:BOOL=
-
-//Use the win32 thread library.
-CMAKE_USE_WIN32_THREADS:BOOL=
-
-//If this value is on, makefiles will be generated without the
-// .SILENT directive, and all commands will be echoed to the console
-// during the make.  This is useful for debugging only. With Visual
-// Studio IDE projects all commands are done without /nologo.
-CMAKE_VERBOSE_MAKEFILE:BOOL=FALSE
-
-//X11 extra flags.
-CMAKE_X_CFLAGS:STRING=
-
-//Libraries and options used in X11 programs.
-CMAKE_X_LIBS:STRING=/usr/X11R6/lib/libSM.dylib;/usr/X11R6/lib/libICE.dylib;/usr/X11R6/lib/libX11.dylib;/usr/X11R6/lib/libXext.dylib
-
-//Path to the coverage program that CTest uses for performing coverage
-// inspection
-COVERAGE_COMMAND:FILEPATH=/usr/bin/gcov
-
-//Path to a program.
-CVSCOMMAND:FILEPATH=/usr/bin/cvs
-
-//Options passed to the cvs update command.
-CVS_UPDATE_OPTIONS:STRING=-d -A -P
-
-//Maximum time allowed before CTest will kill the test.
-DART_TESTING_TIMEOUT:STRING=1500
-
-//Value Computed by CMake
-DICOMParser_BINARY_DIR:STATIC=@BUILD_DIR@/VTK-build/Utilities/DICOMParser
-
-//Value Computed by CMake
-DICOMParser_SOURCE_DIR:STATIC=@BUILD_DIR@/VTK/Utilities/DICOMParser
-
-//Path to a program.
-HGCOMMAND:FILEPATH=HGCOMMAND-NOTFOUND
-
-//Command used to build entire project from the command line.
-MAKECOMMAND:STRING=/usr/bin/make -i
-
-//Path to the memory checking command, used for memory error detection.
-MEMORYCHECK_COMMAND:FILEPATH=/usr/local/bin/valgrind
-
-//File that contains suppressions for the memory checker
-MEMORYCHECK_SUPPRESSIONS_FILE:FILEPATH=
-
-//Value Computed by CMake
-MaterialLibrary_BINARY_DIR:STATIC=@BUILD_DIR@/VTK-build/Utilities/MaterialLibrary
-
-//Value Computed by CMake
-MaterialLibrary_SOURCE_DIR:STATIC=@BUILD_DIR@/VTK/Utilities/MaterialLibrary
-
-//Include for OpenGL on OSX
-OPENGL_INCLUDE_DIR:PATH=/usr/X11R6/include
-
-//OpenGL lib for OSX
-OPENGL_gl_LIBRARY:FILEPATH=/usr/X11R6/lib/libGL.dylib
-
-//AGL lib for OSX
-OPENGL_glu_LIBRARY:FILEPATH=/usr/X11R6/lib/libGLU.dylib
-
-//Path to a file.
-OPENGL_xmesa_INCLUDE_DIR:PATH=/usr/X11R6/include
-
-//Does an external project define proj_list or should libproj4
-// define it?
-PROJ_LIST_EXTERNAL:BOOL=OFF
-
-//Should libproj4 include projection code that relies on GSL?
-PROJ_USE_GSL:BOOL=OFF
-
-//Should libproj4 be built as a thread-friendly library?
-PROJ_USE_PTHREADS:BOOL=OFF
-
-//Path to a library.
-PYTHON_DEBUG_LIBRARY:FILEPATH=/Library/Frameworks/python.framework
-
-//Add module vtkCommonPython
-PYTHON_ENABLE_MODULE_vtkCommonPython:BOOL=ON
-
-//Add module vtkFilteringPython
-PYTHON_ENABLE_MODULE_vtkFilteringPython:BOOL=ON
-
-//Add module vtkGenericFilteringPython
-PYTHON_ENABLE_MODULE_vtkGenericFilteringPython:BOOL=ON
-
-//Add module vtkGeovisPython
-PYTHON_ENABLE_MODULE_vtkGeovisPython:BOOL=ON
-
-//Add module vtkGraphicsPython
-PYTHON_ENABLE_MODULE_vtkGraphicsPython:BOOL=ON
-
-//Add module vtkHybridPython
-PYTHON_ENABLE_MODULE_vtkHybridPython:BOOL=ON
-
-//Add module vtkIOPython
-PYTHON_ENABLE_MODULE_vtkIOPython:BOOL=ON
-
-//Add module vtkImagingPython
-PYTHON_ENABLE_MODULE_vtkImagingPython:BOOL=ON
-
-//Add module vtkInfovisPython
-PYTHON_ENABLE_MODULE_vtkInfovisPython:BOOL=ON
-
-//Add module vtkRenderingPython
-PYTHON_ENABLE_MODULE_vtkRenderingPython:BOOL=ON
-
-//Add module vtkViewsPython
-PYTHON_ENABLE_MODULE_vtkViewsPython:BOOL=ON
-
-//Add module vtkVolumeRenderingPython
-PYTHON_ENABLE_MODULE_vtkVolumeRenderingPython:BOOL=ON
-
-//Add module vtkWidgetsPython
-PYTHON_ENABLE_MODULE_vtkWidgetsPython:BOOL=ON
-
-//Path to a program.
-PYTHON_EXECUTABLE:FILEPATH=@PREFIX_PATH@/bin/python@PYVER@
-
-//Extra libraries to link when linking to python (such as "z" for
-// zlib).  Separate multiple libraries with semicolons.
-PYTHON_EXTRA_LIBS:STRING=
-
-//Path to a file.
-PYTHON_INCLUDE_PATH:PATH=@PREFIX_PATH@/include/python@PYVER@
-
-//Path to a library.
-PYTHON_LIBRARY:FILEPATH=@PREFIX_PATH@/lib/libpython@PYVER@.dylib
-
-//Add module vtkCommonPython shared
-PYTHON_MODULE_vtkCommonPython_BUILD_SHARED:BOOL=ON
-
-//Add module vtkFilteringPython shared
-PYTHON_MODULE_vtkFilteringPython_BUILD_SHARED:BOOL=ON
-
-//Add module vtkGenericFilteringPython shared
-PYTHON_MODULE_vtkGenericFilteringPython_BUILD_SHARED:BOOL=ON
-
-//Add module vtkGeovisPython shared
-PYTHON_MODULE_vtkGeovisPython_BUILD_SHARED:BOOL=ON
-
-//Add module vtkGraphicsPython shared
-PYTHON_MODULE_vtkGraphicsPython_BUILD_SHARED:BOOL=ON
-
-//Add module vtkHybridPython shared
-PYTHON_MODULE_vtkHybridPython_BUILD_SHARED:BOOL=ON
-
-//Add module vtkIOPython shared
-PYTHON_MODULE_vtkIOPython_BUILD_SHARED:BOOL=ON
-
-//Add module vtkImagingPython shared
-PYTHON_MODULE_vtkImagingPython_BUILD_SHARED:BOOL=ON
-
-//Add module vtkInfovisPython shared
-PYTHON_MODULE_vtkInfovisPython_BUILD_SHARED:BOOL=ON
-
-//Add module vtkRenderingPython shared
-PYTHON_MODULE_vtkRenderingPython_BUILD_SHARED:BOOL=ON
-
-//Add module vtkViewsPython shared
-PYTHON_MODULE_vtkViewsPython_BUILD_SHARED:BOOL=ON
-
-//Add module vtkVolumeRenderingPython shared
-PYTHON_MODULE_vtkVolumeRenderingPython_BUILD_SHARED:BOOL=ON
-
-//Add module vtkWidgetsPython shared
-PYTHON_MODULE_vtkWidgetsPython_BUILD_SHARED:BOOL=ON
-
-//Utility library needed for vtkpython
-PYTHON_UTIL_LIBRARY:FILEPATH=/usr/lib/libutil.dylib
-
-//Path to scp command, used by CTest for submitting results to
-// a Dart server
-SCPCOMMAND:FILEPATH=/usr/bin/scp
-
-//Name of the computer/site where compile is being run
-SITE:STRING=omar
-
-//Path to the SLURM sbatch executable
-SLURM_SBATCH_COMMAND:FILEPATH=SLURM_SBATCH_COMMAND-NOTFOUND
-
-//Path to the SLURM srun executable
-SLURM_SRUN_COMMAND:FILEPATH=SLURM_SRUN_COMMAND-NOTFOUND
-
-//Path to a program.
-SVNCOMMAND:FILEPATH=svn
-
-//Path to a file.
-TCL_INCLUDE_PATH:PATH=@EXTERNALS@/include
-
-//Path to a library.
-TCL_LIBRARY:FILEPATH=@EXTERNALS@/lib/libtcl@TCLTK_VERSION@.dylib
-
-//Path to a program.
-TCL_TCLSH:FILEPATH=@EXTERNALS@/bin/tclsh@TCLTK_VERSION@
-
-//Path to a file.
-TK_INCLUDE_PATH:PATH=@EXTERNALS@/include
-
-//The path to the Tk internal headers (tkMacOSXDefault.h).
-TK_INTERNAL_PATH:PATH=@BUILD_DIR@/VTK/Utilities/TclTk/internals/tk8.4
-
-//Path to a library.
-TK_LIBRARY:FILEPATH=@EXTERNALS@/lib/libtk@TCLTK_VERSION@.dylib
-
-//Build the 2007 Verdict User Manual
-VERDICT_BUILD_DOC:BOOL=OFF
-
-//Should tests of the VERDICT library be built?
-VERDICT_ENABLE_TESTING:BOOL=OFF
-
-//Mangle verdict names for inclusion in a larger library?
-VERDICT_MANGLE:BOOL=ON
-
-//VTK requires the verdict prefix to be vtk
-VERDICT_MANGLE_PREFIX:STRING=vtk
-
-//VTK requires doubles
-VERDICT_USE_FLOAT:BOOL=OFF
-
-//Path to a library.
-VLI_LIBRARY_FOR_VP1000:FILEPATH=VLI_LIBRARY_FOR_VP1000-NOTFOUND
-
-//Value Computed by CMake
-VTKEXPAT_BINARY_DIR:STATIC=@BUILD_DIR@/VTK-build/Utilities/vtkexpat
-
-//Value Computed by CMake
-VTKEXPAT_SOURCE_DIR:STATIC=@BUILD_DIR@/VTK/Utilities/vtkexpat
-
-//Value Computed by CMake
-VTKFREETYPE_BINARY_DIR:STATIC=@BUILD_DIR@/VTK-build/Utilities/vtkfreetype
-
-//Value Computed by CMake
-VTKFREETYPE_SOURCE_DIR:STATIC=@BUILD_DIR@/VTK/Utilities/vtkfreetype
-
-//Value Computed by CMake
-VTKFTGL_BINARY_DIR:STATIC=@BUILD_DIR@/VTK-build/Utilities/ftgl
-
-//Value Computed by CMake
-VTKFTGL_SOURCE_DIR:STATIC=@BUILD_DIR@/VTK/Utilities/ftgl
-
-//Value Computed by CMake
-VTKJPEG_BINARY_DIR:STATIC=@BUILD_DIR@/VTK-build/Utilities/vtkjpeg
-
-//Value Computed by CMake
-VTKJPEG_SOURCE_DIR:STATIC=@BUILD_DIR@/VTK/Utilities/vtkjpeg
-
-//Value Computed by CMake
-VTKNETCDF_BINARY_DIR:STATIC=@BUILD_DIR@/VTK-build/Utilities/vtknetcdf
-
-//Value Computed by CMake
-VTKNETCDF_SOURCE_DIR:STATIC=@BUILD_DIR@/VTK/Utilities/vtknetcdf
-
-//Value Computed by CMake
-VTKPNG_BINARY_DIR:STATIC=@BUILD_DIR@/VTK-build/Utilities/vtkpng
-
-//Value Computed by CMake
-VTKPNG_SOURCE_DIR:STATIC=@BUILD_DIR@/VTK/Utilities/vtkpng
-
-//Value Computed by CMake
-VTKTIFF_BINARY_DIR:STATIC=@BUILD_DIR@/VTK-build/Utilities/vtktiff
-
-//Value Computed by CMake
-VTKTIFF_SOURCE_DIR:STATIC=@BUILD_DIR@/VTK/Utilities/vtktiff
-
-//Value Computed by CMake
-VTKZLIB_BINARY_DIR:STATIC=@BUILD_DIR@/VTK-build/Utilities/vtkzlib
-
-//Value Computed by CMake
-VTKZLIB_SOURCE_DIR:STATIC=@BUILD_DIR@/VTK/Utilities/vtkzlib
-
-//Value Computed by CMake
-VTK_BINARY_DIR:STATIC=@BUILD_DIR@/VTK-build
-
-//Path to a file.
-VTK_DATA_ROOT:PATH=@BUILD_DIR@/VTKData
-
-//Build leak checking support into VTK.
-VTK_DEBUG_LEAKS:BOOL=OFF
-
-//Location of the OpenGL extensions header file (glext.h).
-VTK_GLEXT_FILE:FILEPATH=@BUILD_DIR@/VTK/Utilities/ParseOGLExt/headers/glext.h
-
-//Location of the GLX extensions header file (glxext.h).
-VTK_GLXEXT_FILE:FILEPATH=@BUILD_DIR@/VTK/Utilities/ParseOGLExt/headers/glxext.h
-
-//Remove all legacy code completely.
-VTK_LEGACY_REMOVE:BOOL=OFF
-
-//Silence all legacy code messages.
-VTK_LEGACY_SILENT:BOOL=OFF
-
-//; separated directories to search for materials/shaders
-VTK_MATERIALS_DIRS:STRING=@BUILD_DIR@/VTK-build/Utilities/MaterialLibrary/Repository
-
-//Disable multithreading support in the Python bindings
-VTK_NO_PYTHON_THREADS:BOOL=OFF
-
-//The opengl library being used supports off screen Mesa calls.
-VTK_OPENGL_HAS_OSMESA:BOOL=OFF
-
-//Arguments passed to "python setup.py install ..." during installation.
-VTK_PYTHON_SETUP_ARGS:STRING=--prefix="${CMAKE_INSTALL_PREFIX}"
-
-//Value Computed by CMake
-VTK_SOURCE_DIR:STATIC=@BUILD_DIR@/VTK
-
-//VTK tests call vtkFloatingPointExceptions::Enable()
-VTK_TESTING_USE_FPE:BOOL=ON
-
-//Build VTK with 64 bit ids
-VTK_USE_64BIT_IDS:BOOL=OFF
-
-//Use Boost libraries for graph algorithms - www.boost.org.
-VTK_USE_BOOST:BOOL=OFF
-
-//Build classes using Carbon API.
-VTK_USE_CARBON:BOOL=OFF
-
-//Build pixel and vertex shader support for Cg.
-VTK_USE_CG_SHADERS:BOOL=OFF
-
-//Build classes using Cocoa API.
-VTK_USE_COCOA:BOOL=OFF
-
-//Turn this option off and tests and warning/error macros will
-// not pop up windows
-VTK_USE_DISPLAY:BOOL=ON
-
-//If the FFMPEG library is available, should VTK use it for saving
-// .avi animation files?
-VTK_USE_FFMPEG_ENCODER:BOOL=OFF
-
-//Build the vtkGeovis kit.  Needed for performing geographic visualization.
-VTK_USE_GEOVIS:BOOL=ON
-
-//Build VTK with gl2ps support.
-VTK_USE_GL2PS:BOOL=ON
-
-//Build pixel and vertex shader support for GLSL.
-VTK_USE_GLSL_SHADERS:BOOL=ON
-
-//Build VTK with GUI Support
-VTK_USE_GUISUPPORT:BOOL=OFF
-
-//Build the vtkInfovis kit.  Needed for performing information
-// visualization.
-VTK_USE_INFOVIS:BOOL=ON
-
-//Use mangled Mesa with OpenGL.
-VTK_USE_MANGLED_MESA:BOOL=OFF
-
-//Build metaio
-VTK_USE_METAIO:BOOL=ON
-
-//Enable use of the patented mpeg2 library. You are solely responsible
-// for any legal issues associated with using patented code in
-// your software.
-VTK_USE_MPEG2_ENCODER:BOOL=OFF
-
-//Build the MySQL driver for vtkSQLDatabase.
-VTK_USE_MYSQL:BOOL=OFF
-
-//Add support for arbitrary-dimension sparse and dense arrays.
-VTK_USE_N_WAY_ARRAYS:BOOL=OFF
-
-//Build the ODBC database interface
-VTK_USE_ODBC:BOOL=OFF
-
-//Build the vtkParallel kit.
-VTK_USE_PARALLEL:BOOL=OFF
-
-//Build the PostgreSQL driver for vtkSQLDatabase.
-VTK_USE_POSTGRES:BOOL=OFF
-
-//Build the vtkRendering kit.  Needed for displaying data or using
-// widgets.
-VTK_USE_RENDERING:BOOL=ON
-
-//Build shared libraries with rpath.  This makes it easy to run
-// executables from the build tree when using shared libraries,
-// but removes install support.
-VTK_USE_RPATH:BOOL=ON
-
-//Use the system's expat library.
-VTK_USE_SYSTEM_EXPAT:BOOL=OFF
-
-//Use the system's freetype library.
-VTK_USE_SYSTEM_FREETYPE:BOOL=OFF
-
-//Use the system's jpeg library.
-VTK_USE_SYSTEM_JPEG:BOOL=OFF
-
-//Use the system's proj4 library.
-VTK_USE_SYSTEM_LIBPROJ4:BOOL=OFF
-
-//Use the system's libxml2 library.
-VTK_USE_SYSTEM_LIBXML2:BOOL=OFF
-
-//Use the system's png library.
-VTK_USE_SYSTEM_PNG:BOOL=OFF
-
-//Use the system's tiff library.
-VTK_USE_SYSTEM_TIFF:BOOL=OFF
-
-//Use the system's zlib library.
-VTK_USE_SYSTEM_ZLIB:BOOL=OFF
-
-//Build VTK with Tk support
-VTK_USE_TK:BOOL=ON
-
-//Build the vtkViews kit.  Needed for creating packaged and linked
-// views.
-VTK_USE_VIEWS:BOOL=ON
-
-//Enable support for VolumePro 1000.
-VTK_USE_VOLUMEPRO_1000:BOOL=OFF
-
-//Build classes for the X11 window system.
-VTK_USE_X:BOOL=ON
-
-//Location of the WGL extensions header file (wglext.h).
-VTK_WGLEXT_FILE:FILEPATH=@BUILD_DIR@/VTK/Utilities/ParseOGLExt/headers/wglext.h
-
-//Path to a file.
-VTK_WRAP_HINTS:FILEPATH=@BUILD_DIR@/VTK/Wrapping/hints
-
-//Wrap VTK classes into the Java language.
-VTK_WRAP_JAVA:BOOL=OFF
-
-//Wrap VTK classes into the Python language.
-VTK_WRAP_PYTHON:BOOL=ON
-
-//Wrap VTK classes into the TCL language.
-VTK_WRAP_TCL:BOOL=OFF
-
-//Path to a file.
-X11_ICE_INCLUDE_PATH:PATH=/usr/include
-
-//Path to a library.
-X11_ICE_LIB:FILEPATH=/usr/X11R6/lib/libICE.dylib
-
-//Path to a library.
-X11_SM_LIB:FILEPATH=/usr/X11R6/lib/libSM.dylib
-
-//Path to a file.
-X11_X11_INCLUDE_PATH:PATH=/usr/include
-
-//Path to a library.
-X11_X11_LIB:FILEPATH=/usr/X11R6/lib/libX11.dylib
-
-//Path to a file.
-X11_XShm_INCLUDE_PATH:PATH=/usr/include
-
-//Path to a file.
-X11_XTest_INCLUDE_PATH:PATH=/usr/include
-
-//Path to a library.
-X11_XTest_LIB:FILEPATH=/usr/X11R6/lib/libXtst.dylib
-
-//Path to a file.
-X11_Xaccessrules_INCLUDE_PATH:PATH=/usr/include
-
-//Path to a file.
-X11_Xaccessstr_INCLUDE_PATH:PATH=/usr/include
-
-//Path to a file.
-X11_Xau_INCLUDE_PATH:PATH=/usr/include
-
-//Path to a library.
-X11_Xau_LIB:FILEPATH=/usr/X11R6/lib/libXau.dylib
-
-//Path to a file.
-X11_Xcomposite_INCLUDE_PATH:PATH=/usr/include
-
-//Path to a library.
-X11_Xcomposite_LIB:FILEPATH=/usr/X11R6/lib/libXcomposite.dylib
-
-//Path to a file.
-X11_Xcursor_INCLUDE_PATH:PATH=/usr/include
-
-//Path to a library.
-X11_Xcursor_LIB:FILEPATH=/usr/X11R6/lib/libXcursor.dylib
-
-//Path to a file.
-X11_Xdamage_INCLUDE_PATH:PATH=/usr/include
-
-//Path to a library.
-X11_Xdamage_LIB:FILEPATH=/usr/X11R6/lib/libXdamage.dylib
-
-//Path to a file.
-X11_Xdmcp_INCLUDE_PATH:PATH=/usr/include
-
-//Path to a library.
-X11_Xdmcp_LIB:FILEPATH=/usr/X11R6/lib/libXdmcp.dylib
-
-//Path to a library.
-X11_Xext_LIB:FILEPATH=/usr/X11R6/lib/libXext.dylib
-
-//Path to a file.
-X11_Xfixes_INCLUDE_PATH:PATH=/usr/include
-
-//Path to a library.
-X11_Xfixes_LIB:FILEPATH=/usr/X11R6/lib/libXfixes.dylib
-
-//Path to a file.
-X11_Xft_INCLUDE_PATH:PATH=/usr/include
-
-//Path to a library.
-X11_Xft_LIB:FILEPATH=/usr/X11R6/lib/libXft.dylib
-
-//Path to a file.
-X11_Xinerama_INCLUDE_PATH:PATH=/usr/include
-
-//Path to a library.
-X11_Xinerama_LIB:FILEPATH=/usr/X11R6/lib/libXinerama.dylib
-
-//Path to a file.
-X11_Xinput_INCLUDE_PATH:PATH=/usr/include
-
-//Path to a library.
-X11_Xinput_LIB:FILEPATH=/usr/X11R6/lib/libXi.dylib
-
-//Path to a file.
-X11_Xkb_INCLUDE_PATH:PATH=/usr/include
-
-//Path to a file.
-X11_Xkblib_INCLUDE_PATH:PATH=/usr/include
-
-//Path to a file.
-X11_Xlib_INCLUDE_PATH:PATH=/usr/include
-
-//Path to a file.
-X11_Xpm_INCLUDE_PATH:PATH=/usr/include
-
-//Path to a library.
-X11_Xpm_LIB:FILEPATH=/usr/X11R6/lib/libXpm.dylib
-
-//Path to a file.
-X11_Xrandr_INCLUDE_PATH:PATH=/usr/include
-
-//Path to a library.
-X11_Xrandr_LIB:FILEPATH=/usr/X11R6/lib/libXrandr.dylib
-
-//Path to a file.
-X11_Xrender_INCLUDE_PATH:PATH=/usr/include
-
-//Path to a library.
-X11_Xrender_LIB:FILEPATH=/usr/X11R6/lib/libXrender.dylib
-
-//Path to a file.
-X11_Xscreensaver_INCLUDE_PATH:PATH=/usr/include
-
-//Path to a library.
-X11_Xscreensaver_LIB:FILEPATH=/usr/X11R6/lib/libXss.dylib
-
-//Path to a file.
-X11_Xshape_INCLUDE_PATH:PATH=/usr/include
-
-//Path to a file.
-X11_Xt_INCLUDE_PATH:PATH=/usr/include
-
-//Path to a library.
-X11_Xt_LIB:FILEPATH=/usr/X11R6/lib/libXt.dylib
-
-//Path to a file.
-X11_Xutil_INCLUDE_PATH:PATH=/usr/include
-
-//Path to a file.
-X11_Xv_INCLUDE_PATH:PATH=/usr/include
-
-//Path to a library.
-X11_Xv_LIB:FILEPATH=/usr/X11R6/lib/libXv.dylib
-
-//Path to a library.
-X11_Xxf86misc_LIB:FILEPATH=/usr/X11R6/lib/libXxf86misc.dylib
-
-//Path to a file.
-X11_dpms_INCLUDE_PATH:PATH=/usr/include
-
-//Path to a library.
-X11_fontconfig_LIB:FILEPATH=/usr/X11R6/lib/libfontconfig.dylib
-
-//Path to a file.
-X11_xf86misc_INCLUDE_PATH:PATH=/usr/include
-
-//Path to a file.
-X11_xf86vmode_INCLUDE_PATH:PATH=/usr/include
-
-//Value Computed by CMake
-alglib_BINARY_DIR:STATIC=@BUILD_DIR@/VTK-build/Utilities/vtkalglib
-
-//Value Computed by CMake
-alglib_SOURCE_DIR:STATIC=@BUILD_DIR@/VTK/Utilities/vtkalglib
-
-//Value Computed by CMake
-libproj4_BINARY_DIR:STATIC=@BUILD_DIR@/VTK-build/Utilities/vtklibproj4
-
-//Value Computed by CMake
-libproj4_SOURCE_DIR:STATIC=@BUILD_DIR@/VTK/Utilities/vtklibproj4
-
-//Value Computed by CMake
-verdict_BINARY_DIR:STATIC=@BUILD_DIR@/VTK-build/Utilities/verdict
-
-//Value Computed by CMake
-verdict_SOURCE_DIR:STATIC=@BUILD_DIR@/VTK/Utilities/verdict
-
-//Dependencies for the target
-vtkCommonPythonD_LIB_DEPENDS:STATIC=general;vtkCommon;optimized;@PREFIX_PATH@/lib/libpython@PYVER@.dylib;debug;/Library/Frameworks/python.framework;
-
-//Dependencies for the target
-vtkCommonPython_LIB_DEPENDS:STATIC=general;vtkCommonPythonD;optimized;@PREFIX_PATH@/lib/libpython@PYVER@.dylib;debug;/Library/Frameworks/python.framework;
-
-//Dependencies for the target
-vtkCommon_LIB_DEPENDS:STATIC=general;vtksys;general;-lpthread;general;-lm;
-
-//Dependencies for target
-vtkDICOMParser_LIB_DEPENDS:STATIC=
-
-//Value Computed by CMake
-vtkExodus2_BINARY_DIR:STATIC=@BUILD_DIR@/VTK-build/Utilities/vtkexodus2
-
-//Value Computed by CMake
-vtkExodus2_SOURCE_DIR:STATIC=@BUILD_DIR@/VTK/Utilities/vtkexodus2
-
-//Dependencies for the target
-vtkFilteringPythonD_LIB_DEPENDS:STATIC=general;vtkFiltering;general;vtkCommonPythonD;optimized;@PREFIX_PATH@/lib/libpython@PYVER@.dylib;debug;/Library/Frameworks/python.framework;
-
-//Dependencies for the target
-vtkFilteringPython_LIB_DEPENDS:STATIC=general;vtkFilteringPythonD;optimized;@PREFIX_PATH@/lib/libpython@PYVER@.dylib;debug;/Library/Frameworks/python.framework;
-
-//Dependencies for the target
-vtkFiltering_LIB_DEPENDS:STATIC=general;vtkCommon;
-
-//Dependencies for the target
-vtkGenericFilteringPythonD_LIB_DEPENDS:STATIC=general;vtkGenericFiltering;general;vtkFilteringPythonD;general;vtkGraphicsPythonD;optimized;@PREFIX_PATH@/lib/libpython@PYVER@.dylib;debug;/Library/Frameworks/python.framework;
-
-//Dependencies for the target
-vtkGenericFilteringPython_LIB_DEPENDS:STATIC=general;vtkGenericFilteringPythonD;optimized;@PREFIX_PATH@/lib/libpython@PYVER@.dylib;debug;/Library/Frameworks/python.framework;
-
-//Dependencies for the target
-vtkGenericFiltering_LIB_DEPENDS:STATIC=general;vtkFiltering;general;vtkGraphics;
-
-//Dependencies for the target
-vtkGeovisPythonD_LIB_DEPENDS:STATIC=general;vtkGeovis;general;vtkWidgetsPythonD;general;vtkViewsPythonD;optimized;@PREFIX_PATH@/lib/libpython@PYVER@.dylib;debug;/Library/Frameworks/python.framework;
-
-//Dependencies for the target
-vtkGeovisPython_LIB_DEPENDS:STATIC=general;vtkGeovisPythonD;optimized;@PREFIX_PATH@/lib/libpython@PYVER@.dylib;debug;/Library/Frameworks/python.framework;
-
-//Dependencies for the target
-vtkGeovis_LIB_DEPENDS:STATIC=general;vtkWidgets;general;vtkViews;general;vtkproj4;
-
-//Dependencies for the target
-vtkGraphicsPythonD_LIB_DEPENDS:STATIC=general;vtkGraphics;general;vtkFilteringPythonD;optimized;@PREFIX_PATH@/lib/libpython@PYVER@.dylib;debug;/Library/Frameworks/python.framework;
-
-//Dependencies for the target
-vtkGraphicsPython_LIB_DEPENDS:STATIC=general;vtkGraphicsPythonD;optimized;@PREFIX_PATH@/lib/libpython@PYVER@.dylib;debug;/Library/Frameworks/python.framework;
-
-//Dependencies for the target
-vtkGraphics_LIB_DEPENDS:STATIC=general;vtkFiltering;general;vtkverdict;
-
-//Dependencies for the target
-vtkHybridPythonD_LIB_DEPENDS:STATIC=general;vtkHybrid;general;vtkRenderingPythonD;general;vtkIOPythonD;optimized;@PREFIX_PATH@/lib/libpython@PYVER@.dylib;debug;/Library/Frameworks/python.framework;
-
-//Dependencies for the target
-vtkHybridPython_LIB_DEPENDS:STATIC=general;vtkHybridPythonD;optimized;@PREFIX_PATH@/lib/libpython@PYVER@.dylib;debug;/Library/Frameworks/python.framework;
-
-//Dependencies for the target
-vtkHybrid_LIB_DEPENDS:STATIC=general;vtkRendering;general;vtkIO;general;vtkexoIIc;
-
-//Dependencies for the target
-vtkIOPythonD_LIB_DEPENDS:STATIC=general;vtkIO;general;vtkFilteringPythonD;optimized;@PREFIX_PATH@/lib/libpython@PYVER@.dylib;debug;/Library/Frameworks/python.framework;
-
-//Dependencies for the target
-vtkIOPython_LIB_DEPENDS:STATIC=general;vtkIOPythonD;optimized;@PREFIX_PATH@/lib/libpython@PYVER@.dylib;debug;/Library/Frameworks/python.framework;
-
-//Dependencies for the target
-vtkIO_LIB_DEPENDS:STATIC=general;vtkFiltering;general;vtkDICOMParser;general;vtkNetCDF;general;vtkmetaio;general;vtksqlite;general;vtkpng;general;vtkzlib;general;vtkjpeg;general;vtktiff;general;vtkexpat;general;vtksys;
-
-//Dependencies for the target
-vtkImagingPythonD_LIB_DEPENDS:STATIC=general;vtkImaging;general;vtkFilteringPythonD;optimized;@PREFIX_PATH@/lib/libpython@PYVER@.dylib;debug;/Library/Frameworks/python.framework;
-
-//Dependencies for the target
-vtkImagingPython_LIB_DEPENDS:STATIC=general;vtkImagingPythonD;optimized;@PREFIX_PATH@/lib/libpython@PYVER@.dylib;debug;/Library/Frameworks/python.framework;
-
-//Dependencies for the target
-vtkImaging_LIB_DEPENDS:STATIC=general;vtkFiltering;
-
-//Dependencies for the target
-vtkInfovisPythonD_LIB_DEPENDS:STATIC=general;vtkInfovis;general;vtkWidgetsPythonD;optimized;@PREFIX_PATH@/lib/libpython@PYVER@.dylib;debug;/Library/Frameworks/python.framework;
-
-//Dependencies for the target
-vtkInfovisPython_LIB_DEPENDS:STATIC=general;vtkInfovisPythonD;optimized;@PREFIX_PATH@/lib/libpython@PYVER@.dylib;debug;/Library/Frameworks/python.framework;
-
-//Dependencies for the target
-vtkInfovis_LIB_DEPENDS:STATIC=general;vtkWidgets;general;vtklibxml2;general;vtkalglib;
-
-//Dependencies for target
-vtkNetCDF_LIB_DEPENDS:STATIC=
-
-//Dependencies for the target
-vtkRenderingPythonD_LIB_DEPENDS:STATIC=general;vtkRendering;general;vtkGraphicsPythonD;general;vtkImagingPythonD;optimized;@PREFIX_PATH@/lib/libpython@PYVER@.dylib;debug;/Library/Frameworks/python.framework;
-
-//Dependencies for the target
-vtkRenderingPythonTkWidgets_LIB_DEPENDS:STATIC=general;vtkRendering;general;@EXTERNALS@/lib/libtk@TCLTK_VERSION@.dylib;general;@EXTERNALS@/lib/libtcl@TCLTK_VERSION@.dylib;general;m;
-
-//Dependencies for the target
-vtkRenderingPython_LIB_DEPENDS:STATIC=general;vtkRenderingPythonD;optimized;@PREFIX_PATH@/lib/libpython@PYVER@.dylib;debug;/Library/Frameworks/python.framework;
-
-//Dependencies for the target
-vtkRendering_LIB_DEPENDS:STATIC=general;vtkGraphics;general;vtkImaging;general;vtkIO;general;vtkftgl;general;vtkfreetype;general;vtkzlib;general;vtkpng;general;/usr/X11R6/lib/libXt.dylib;general;/usr/X11R6/lib/libSM.dylib;general;/usr/X11R6/lib/libICE.dylib;general;/usr/X11R6/lib/libX11.dylib;general;/usr/X11R6/lib/libXext.dylib;general;/usr/X11R6/lib/libXss.dylib;general;/usr/X11R6/lib/libXft.dylib;general;/usr/X11R6/lib/libfontconfig.dylib;
-
-//Dependencies for the target
-vtkViewsPythonD_LIB_DEPENDS:STATIC=general;vtkViews;general;vtkInfovisPythonD;optimized;@PREFIX_PATH@/lib/libpython@PYVER@.dylib;debug;/Library/Frameworks/python.framework;
-
-//Dependencies for the target
-vtkViewsPython_LIB_DEPENDS:STATIC=general;vtkViewsPythonD;optimized;@PREFIX_PATH@/lib/libpython@PYVER@.dylib;debug;/Library/Frameworks/python.framework;
-
-//Dependencies for the target
-vtkViews_LIB_DEPENDS:STATIC=general;vtkInfovis;
-
-//Dependencies for the target
-vtkVolumeRenderingPythonD_LIB_DEPENDS:STATIC=general;vtkVolumeRendering;general;vtkRenderingPythonD;general;vtkIOPythonD;optimized;@PREFIX_PATH@/lib/libpython@PYVER@.dylib;debug;/Library/Frameworks/python.framework;
-
-//Dependencies for the target
-vtkVolumeRenderingPython_LIB_DEPENDS:STATIC=general;vtkVolumeRenderingPythonD;optimized;@PREFIX_PATH@/lib/libpython@PYVER@.dylib;debug;/Library/Frameworks/python.framework;
-
-//Dependencies for the target
-vtkVolumeRendering_LIB_DEPENDS:STATIC=general;vtkRendering;general;vtkIO;
-
-//Dependencies for the target
-vtkWidgetsPythonD_LIB_DEPENDS:STATIC=general;vtkWidgets;general;vtkRenderingPythonD;general;vtkHybridPythonD;optimized;@PREFIX_PATH@/lib/libpython@PYVER@.dylib;debug;/Library/Frameworks/python.framework;
-
-//Dependencies for the target
-vtkWidgetsPython_LIB_DEPENDS:STATIC=general;vtkWidgetsPythonD;optimized;@PREFIX_PATH@/lib/libpython@PYVER@.dylib;debug;/Library/Frameworks/python.framework;
-
-//Dependencies for the target
-vtkWidgets_LIB_DEPENDS:STATIC=general;vtkRendering;general;vtkHybrid;
-
-//Dependencies for target
-vtkalglib_LIB_DEPENDS:STATIC=
-
-//Dependencies for the target
-vtkexoIIc_LIB_DEPENDS:STATIC=general;vtkNetCDF;
-
-//Dependencies for target
-vtkexpat_LIB_DEPENDS:STATIC=
-
-//Dependencies for target
-vtkfreetype_LIB_DEPENDS:STATIC=
-
-//Dependencies for the target
-vtkftgl_LIB_DEPENDS:STATIC=general;/usr/X11R6/lib/libGL.dylib;general;vtkfreetype;
-
-//Dependencies for target
-vtkjpeg_LIB_DEPENDS:STATIC=
-
-//Value Computed by CMake
-vtklibxml2_BINARY_DIR:STATIC=@BUILD_DIR@/VTK-build/Utilities/vtklibxml2
-
-//Dependencies for the target
-vtklibxml2_LIB_DEPENDS:STATIC=general;vtkzlib;general;dl;general;-lpthread;general;m;
-
-//Value Computed by CMake
-vtklibxml2_SOURCE_DIR:STATIC=@BUILD_DIR@/VTK/Utilities/vtklibxml2
-
-//Value Computed by CMake
-vtkmetaio_BINARY_DIR:STATIC=@BUILD_DIR@/VTK-build/Utilities/vtkmetaio
-
-//Dependencies for the target
-vtkmetaio_LIB_DEPENDS:STATIC=general;vtkzlib;general;vtksys;
-
-//Value Computed by CMake
-vtkmetaio_SOURCE_DIR:STATIC=@BUILD_DIR@/VTK/Utilities/vtkmetaio
-
-//Dependencies for the target
-vtkpng_LIB_DEPENDS:STATIC=general;vtkzlib;
-
-//Dependencies for the target
-vtkproj4_LIB_DEPENDS:STATIC=general;m;
-
-//Dependencies for target
-vtksqlite_LIB_DEPENDS:STATIC=
-
-//Value Computed by CMake
-vtksys_BINARY_DIR:STATIC=@BUILD_DIR@/VTK-build/Utilities/kwsys
-
-//Dependencies for target
-vtksys_LIB_DEPENDS:STATIC=
-
-//Value Computed by CMake
-vtksys_SOURCE_DIR:STATIC=@BUILD_DIR@/VTK/Utilities/kwsys
-
-//Dependencies for the target
-vtktiff_LIB_DEPENDS:STATIC=general;vtkzlib;general;vtkjpeg;
-
-//Dependencies for target
-vtkverdict_LIB_DEPENDS:STATIC=
-
-//Dependencies for target
-vtkzlib_LIB_DEPENDS:STATIC=
-
-
-########################
-# INTERNAL cache entries
-########################
-
-ALGLIB_SHARED_LIB:INTERNAL=ON
-//ADVANCED property for variable: BUILD_DOCUMENTATION
-BUILD_DOCUMENTATION-ADVANCED:INTERNAL=1
-//MODIFIED property for variable: BUILD_EXAMPLES
-BUILD_EXAMPLES-MODIFIED:INTERNAL=ON
-//MODIFIED property for variable: BUILD_SHARED_LIBS
-BUILD_SHARED_LIBS-MODIFIED:INTERNAL=ON
-//MODIFIED property for variable: BUILD_TESTING
-BUILD_TESTING-MODIFIED:INTERNAL=ON
-//ADVANCED property for variable: BZRCOMMAND
-BZRCOMMAND-ADVANCED:INTERNAL=1
-//Result of TRY_COMPILE
-CMAKE_ANSI_FOR_SCOPE:INTERNAL=TRUE
-//Have include iostream
-CMAKE_ANSI_STREAM_HEADERS:INTERNAL=1
-//ADVANCED property for variable: CMAKE_AR
-CMAKE_AR-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: CMAKE_BUILD_TOOL
-CMAKE_BUILD_TOOL-ADVANCED:INTERNAL=1
-//What is the target build tool cmake is generating for.
-CMAKE_BUILD_TOOL:INTERNAL=/usr/bin/make
-//This is the directory where this CMakeCache.txt was created
-CMAKE_CACHEFILE_DIR:INTERNAL=@BUILD_DIR@/VTK-build
-//Major version of cmake used to create the current loaded cache
-CMAKE_CACHE_MAJOR_VERSION:INTERNAL=2
-//Minor version of cmake used to create the current loaded cache
-CMAKE_CACHE_MINOR_VERSION:INTERNAL=8
-//Patch version of cmake used to create the current loaded cache
-CMAKE_CACHE_PATCH_VERSION:INTERNAL=0
-//ADVANCED property for variable: CMAKE_COLOR_MAKEFILE
-CMAKE_COLOR_MAKEFILE-ADVANCED:INTERNAL=1
-//Path to CMake executable.
-CMAKE_COMMAND:INTERNAL=cmake
-//Path to cpack program executable.
-CMAKE_CPACK_COMMAND:INTERNAL=cpack
-//Path to ctest program executable.
-CMAKE_CTEST_COMMAND:INTERNAL=ctest
-//ADVANCED property for variable: CMAKE_CXX_COMPILER
-CMAKE_CXX_COMPILER-ADVANCED:INTERNAL=1
-CMAKE_CXX_COMPILER_WORKS:INTERNAL=1
-//ADVANCED property for variable: CMAKE_CXX_FLAGS
-CMAKE_CXX_FLAGS-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: CMAKE_CXX_FLAGS_DEBUG
-CMAKE_CXX_FLAGS_DEBUG-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: CMAKE_CXX_FLAGS_MINSIZEREL
-CMAKE_CXX_FLAGS_MINSIZEREL-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: CMAKE_CXX_FLAGS_RELEASE
-CMAKE_CXX_FLAGS_RELEASE-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: CMAKE_CXX_FLAGS_RELWITHDEBINFO
-CMAKE_CXX_FLAGS_RELWITHDEBINFO-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: CMAKE_C_COMPILER
-CMAKE_C_COMPILER-ADVANCED:INTERNAL=1
-CMAKE_C_COMPILER_WORKS:INTERNAL=1
-//ADVANCED property for variable: CMAKE_C_FLAGS
-CMAKE_C_FLAGS-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: CMAKE_C_FLAGS_DEBUG
-CMAKE_C_FLAGS_DEBUG-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: CMAKE_C_FLAGS_MINSIZEREL
-CMAKE_C_FLAGS_MINSIZEREL-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: CMAKE_C_FLAGS_RELEASE
-CMAKE_C_FLAGS_RELEASE-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: CMAKE_C_FLAGS_RELWITHDEBINFO
-CMAKE_C_FLAGS_RELWITHDEBINFO-ADVANCED:INTERNAL=1
-//Result of TRY_COMPILE
-CMAKE_DETERMINE_CXX_ABI_COMPILED:INTERNAL=TRUE
-//Result of TRY_COMPILE
-CMAKE_DETERMINE_C_ABI_COMPILED:INTERNAL=TRUE
-//Path to cache edit program executable.
-CMAKE_EDIT_COMMAND:INTERNAL=ccmake
-//Executable file format
-CMAKE_EXECUTABLE_FORMAT:INTERNAL=Unknown
-//ADVANCED property for variable: CMAKE_EXE_LINKER_FLAGS
-CMAKE_EXE_LINKER_FLAGS-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: CMAKE_EXE_LINKER_FLAGS_DEBUG
-CMAKE_EXE_LINKER_FLAGS_DEBUG-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: CMAKE_EXE_LINKER_FLAGS_MINSIZEREL
-CMAKE_EXE_LINKER_FLAGS_MINSIZEREL-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: CMAKE_EXE_LINKER_FLAGS_RELEASE
-CMAKE_EXE_LINKER_FLAGS_RELEASE-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: CMAKE_EXE_LINKER_FLAGS_RELWITHDEBINFO
-CMAKE_EXE_LINKER_FLAGS_RELWITHDEBINFO-ADVANCED:INTERNAL=1
-//Name of generator.
-CMAKE_GENERATOR:INTERNAL=Unix Makefiles
-//Result of TRY_COMPILE
-CMAKE_HAS_ANSI_STRING_STREAM:INTERNAL=TRUE
-//Is X11 around.
-CMAKE_HAS_X:INTERNAL=1
-//Have function connect
-CMAKE_HAVE_CONNECT:INTERNAL=1
-//Have function gethostbyname
-CMAKE_HAVE_GETHOSTBYNAME:INTERNAL=1
-//Have include CMAKE_HAVE_LIMITS_H
-CMAKE_HAVE_LIMITS_H:INTERNAL=1
-//Have library pthreads
-CMAKE_HAVE_PTHREADS_CREATE:INTERNAL=
-//Have library pthread
-CMAKE_HAVE_PTHREAD_CREATE:INTERNAL=1
-//Have include CMAKE_HAVE_PTHREAD_H
-CMAKE_HAVE_PTHREAD_H:INTERNAL=1
-//Have function remove
-CMAKE_HAVE_REMOVE:INTERNAL=1
-//Have function shmat
-CMAKE_HAVE_SHMAT:INTERNAL=1
-//Have includes CMAKE_HAVE_SYS_PRCTL_H
-CMAKE_HAVE_SYS_PRCTL_H:INTERNAL=
-//Have include CMAKE_HAVE_UNISTD_H
-CMAKE_HAVE_UNISTD_H:INTERNAL=1
-//Start directory with the top level CMakeLists.txt file for this
-// project
-CMAKE_HOME_DIRECTORY:INTERNAL=@BUILD_DIR@/VTK
-//ADVANCED property for variable: CMAKE_HP_PTHREADS
-CMAKE_HP_PTHREADS-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: CMAKE_INSTALL_NAME_TOOL
-CMAKE_INSTALL_NAME_TOOL-ADVANCED:INTERNAL=1
-//MODIFIED property for variable: CMAKE_INSTALL_PREFIX
-CMAKE_INSTALL_PREFIX-MODIFIED:INTERNAL=ON
-//Have library ICE
-CMAKE_LIB_ICE_HAS_ICECONNECTIONNUMBER:INTERNAL=1
-//ADVANCED property for variable: CMAKE_LINKER
-CMAKE_LINKER-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: CMAKE_MAKE_PROGRAM
-CMAKE_MAKE_PROGRAM-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: CMAKE_MODULE_LINKER_FLAGS
-CMAKE_MODULE_LINKER_FLAGS-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: CMAKE_MODULE_LINKER_FLAGS_DEBUG
-CMAKE_MODULE_LINKER_FLAGS_DEBUG-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: CMAKE_MODULE_LINKER_FLAGS_MINSIZEREL
-CMAKE_MODULE_LINKER_FLAGS_MINSIZEREL-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: CMAKE_MODULE_LINKER_FLAGS_RELEASE
-CMAKE_MODULE_LINKER_FLAGS_RELEASE-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: CMAKE_MODULE_LINKER_FLAGS_RELWITHDEBINFO
-CMAKE_MODULE_LINKER_FLAGS_RELWITHDEBINFO-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: CMAKE_NM
-CMAKE_NM-ADVANCED:INTERNAL=1
-//Does the compiler support ansi for scope.
-CMAKE_NO_ANSI_FOR_SCOPE:INTERNAL=0
-//ADVANCED property for variable: CMAKE_NO_ANSI_STREAM_HEADERS
-CMAKE_NO_ANSI_STREAM_HEADERS-ADVANCED:INTERNAL=1
-//Does the compiler support headers like iostream.
-CMAKE_NO_ANSI_STREAM_HEADERS:INTERNAL=0
-//Does the compiler support sstream
-CMAKE_NO_ANSI_STRING_STREAM:INTERNAL=0
-//Does the compiler support std::.
-CMAKE_NO_STD_NAMESPACE:INTERNAL=0
-//number of local generators
-CMAKE_NUMBER_OF_LOCAL_GENERATORS:INTERNAL=44
-//ADVANCED property for variable: CMAKE_OBJCOPY
-CMAKE_OBJCOPY-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: CMAKE_OBJDUMP
-CMAKE_OBJDUMP-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: CMAKE_RANLIB
-CMAKE_RANLIB-ADVANCED:INTERNAL=1
-//Test Support for 64 bit file systems
-CMAKE_REQUIRE_LARGE_FILE_SUPPORT:INTERNAL=1
-//Path to CMake installation.
-CMAKE_ROOT:INTERNAL=@EXTERNALS@/share/cmake-2.8
-//ADVANCED property for variable: CMAKE_SHARED_LINKER_FLAGS
-CMAKE_SHARED_LINKER_FLAGS-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: CMAKE_SHARED_LINKER_FLAGS_DEBUG
-CMAKE_SHARED_LINKER_FLAGS_DEBUG-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: CMAKE_SHARED_LINKER_FLAGS_MINSIZEREL
-CMAKE_SHARED_LINKER_FLAGS_MINSIZEREL-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: CMAKE_SHARED_LINKER_FLAGS_RELEASE
-CMAKE_SHARED_LINKER_FLAGS_RELEASE-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: CMAKE_SHARED_LINKER_FLAGS_RELWITHDEBINFO
-CMAKE_SHARED_LINKER_FLAGS_RELWITHDEBINFO-ADVANCED:INTERNAL=1
-//Result of CHECK_TYPE_SIZE
-CMAKE_SIZEOF_CHAR:INTERNAL=1
-//Result of CHECK_TYPE_SIZE
-CMAKE_SIZEOF_DOUBLE:INTERNAL=8
-//Result of CHECK_TYPE_SIZE
-CMAKE_SIZEOF_FLOAT:INTERNAL=4
-//Result of CHECK_TYPE_SIZE
-CMAKE_SIZEOF_INT:INTERNAL=4
-//Result of CHECK_TYPE_SIZE
-CMAKE_SIZEOF_LONG:INTERNAL=4
-//Result of CHECK_TYPE_SIZE
-CMAKE_SIZEOF_SHORT:INTERNAL=2
-//Result of CHECK_TYPE_SIZE
-CMAKE_SIZEOF_UNSIGNED_SHORT:INTERNAL=2
-//Result of CHECK_TYPE_SIZE
-CMAKE_SIZEOF_VOID_P:INTERNAL=4
-//ADVANCED property for variable: CMAKE_SKIP_RPATH
-CMAKE_SKIP_RPATH-ADVANCED:INTERNAL=1
-//Whether to build with rpath.
-CMAKE_SKIP_RPATH:INTERNAL=0
-//Result of TRY_COMPILE
-CMAKE_STD_NAMESPACE:INTERNAL=TRUE
-//ADVANCED property for variable: CMAKE_STRIP
-CMAKE_STRIP-ADVANCED:INTERNAL=1
-//Suppress Warnings that are meant for the author of the CMakeLists.txt
-// files.
-CMAKE_SUPPRESS_DEVELOPER_WARNINGS:INTERNAL=TRUE
-//ADVANCED property for variable: CMAKE_THREAD_LIBS
-CMAKE_THREAD_LIBS-ADVANCED:INTERNAL=1
-//uname command
-CMAKE_UNAME:INTERNAL=/usr/bin/uname
-//ADVANCED property for variable: CMAKE_USE_PTHREADS
-CMAKE_USE_PTHREADS-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: CMAKE_USE_RELATIVE_PATHS
-CMAKE_USE_RELATIVE_PATHS-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: CMAKE_USE_SPROC
-CMAKE_USE_SPROC-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: CMAKE_USE_WIN32_THREADS
-CMAKE_USE_WIN32_THREADS-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: CMAKE_VERBOSE_MAKEFILE
-CMAKE_VERBOSE_MAKEFILE-ADVANCED:INTERNAL=1
-//Result of TEST_BIG_ENDIAN
-CMAKE_WORDS_BIGENDIAN:INTERNAL=0
-//ADVANCED property for variable: CMAKE_X_CFLAGS
-CMAKE_X_CFLAGS-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: CMAKE_X_LIBS
-CMAKE_X_LIBS-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: COVERAGE_COMMAND
-COVERAGE_COMMAND-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: CVSCOMMAND
-CVSCOMMAND-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: CVS_UPDATE_OPTIONS
-CVS_UPDATE_OPTIONS-ADVANCED:INTERNAL=1
-//CXX compiler accepts flag -no-cpp-precomp
-CXX_HAS_CPP_PRECOMP_FLAG:INTERNAL=TRUE
-//ADVANCED property for variable: DART_TESTING_TIMEOUT
-DART_TESTING_TIMEOUT-ADVANCED:INTERNAL=1
-//Single output directory for building all executables.
-EXECUTABLE_OUTPUT_PATH:INTERNAL=@BUILD_DIR@/VTK-build/bin
-//Have include malloc.h
-EX_HAVE_MALLOC_H:INTERNAL=
-//Details about finding PythonInterp
-FIND_PACKAGE_MESSAGE_DETAILS_PythonInterp:INTERNAL=[@PREFIX_PATH@/bin/python@PYVER@]
-//Details about finding PythonLibs
-FIND_PACKAGE_MESSAGE_DETAILS_PythonLibs:INTERNAL=[@PREFIX_PATH@/lib/libpython@PYVER@.dylib][@PREFIX_PATH@/include/python@PYVER@]
-//Details about finding TCL
-FIND_PACKAGE_MESSAGE_DETAILS_TCL:INTERNAL=[@EXTERNALS@/lib/libtcl@TCLTK_VERSION@.dylib][@EXTERNALS@/include]
-//Details about finding TCLTK
-FIND_PACKAGE_MESSAGE_DETAILS_TCLTK:INTERNAL=[@EXTERNALS@/lib/libtcl@TCLTK_VERSION@.dylib][@EXTERNALS@/include][@EXTERNALS@/lib/libtk@TCLTK_VERSION@.dylib][@EXTERNALS@/include]
-//Details about finding TK
-FIND_PACKAGE_MESSAGE_DETAILS_TK:INTERNAL=[@EXTERNALS@/lib/libtk@TCLTK_VERSION@.dylib][@EXTERNALS@/include]
-//Details about finding Tclsh
-FIND_PACKAGE_MESSAGE_DETAILS_Tclsh:INTERNAL=[@EXTERNALS@/bin/tclsh@TCLTK_VERSION@]
-//Details about finding Threads
-FIND_PACKAGE_MESSAGE_DETAILS_Threads:INTERNAL=[TRUE]
-//Details about finding X11
-FIND_PACKAGE_MESSAGE_DETAILS_X11:INTERNAL=[/usr/X11R6/lib/libX11.dylib][/usr/include;/usr/include;/usr/include;/usr/include;/usr/include;/usr/include;/usr/include;/usr/include;/usr/include;/usr/include;/usr/include;/usr/include;/usr/include;/usr/include;/usr/include;/usr/include;/usr/include;/usr/include;/usr/include;/usr/include;/usr/include;/usr/include;/usr/include;/usr/include]
-//Have symbol alloca
-HAVE_ALLOCA:INTERNAL=1
-//Have include HAVE_ALLOCA_H
-HAVE_ALLOCA_H:INTERNAL=1
-//Have includes HAVE_ANSIDECL_H
-HAVE_ANSIDECL_H:INTERNAL=
-//Have include HAVE_ARPA_INET_H
-HAVE_ARPA_INET_H:INTERNAL=1
-//Have include HAVE_ARPA_NAMESER_H
-HAVE_ARPA_NAMESER_H:INTERNAL=1
-//Have include HAVE_ASSERT_H
-HAVE_ASSERT_H:INTERNAL=1
-//Result of TRY_COMPILE
-HAVE_CMAKE_REQUIRE_LARGE_FILE_SUPPORT:INTERNAL=TRUE
-//Result of TRY_COMPILE
-HAVE_CMAKE_SIZEOF_CHAR:INTERNAL=TRUE
-//Result of TRY_COMPILE
-HAVE_CMAKE_SIZEOF_DOUBLE:INTERNAL=TRUE
-//Result of TRY_COMPILE
-HAVE_CMAKE_SIZEOF_FLOAT:INTERNAL=TRUE
-//Result of TRY_COMPILE
-HAVE_CMAKE_SIZEOF_INT:INTERNAL=TRUE
-//Result of TRY_COMPILE
-HAVE_CMAKE_SIZEOF_LONG:INTERNAL=TRUE
-//Result of TRY_COMPILE
-HAVE_CMAKE_SIZEOF_SHORT:INTERNAL=TRUE
-//Result of TRY_COMPILE
-HAVE_CMAKE_SIZEOF_UNSIGNED_SHORT:INTERNAL=TRUE
-//Result of TRY_COMPILE
-HAVE_CMAKE_SIZEOF_VOID_P:INTERNAL=TRUE
-//Result of TRY_COMPILE
-HAVE_CMAKE_WORDS_BIGENDIAN:INTERNAL=TRUE
-//Have include HAVE_CTYPE_H
-HAVE_CTYPE_H:INTERNAL=1
-//Have include HAVE_DIRENT_H
-HAVE_DIRENT_H:INTERNAL=1
-//Have include HAVE_DLFCN_H
-HAVE_DLFCN_H:INTERNAL=1
-//Have library dl;-lpthread;m
-HAVE_DLOPEN:INTERNAL=1
-//Have includes HAVE_DL_H
-HAVE_DL_H:INTERNAL=
-//Have include HAVE_ERRNO_H
-HAVE_ERRNO_H:INTERNAL=1
-//Have include HAVE_FCNTL_H
-HAVE_FCNTL_H:INTERNAL=1
-//Have symbol finite
-HAVE_FINITE:INTERNAL=1
-//Have include HAVE_FLOAT_H
-HAVE_FLOAT_H:INTERNAL=1
-//Have function floor
-HAVE_FLOOR:INTERNAL=1
-//Have symbol fpclass
-HAVE_FPCLASS:INTERNAL=
-//Have symbol fprintf
-HAVE_FPRINTF:INTERNAL=1
-//Have symbol fp_class
-HAVE_FP_CLASS:INTERNAL=
-//Have includes HAVE_FP_CLASS_H
-HAVE_FP_CLASS_H:INTERNAL=
-//Have symbol ftime
-HAVE_FTIME:INTERNAL=1
-//NetCDF test 
-HAVE_FTRUNCATE:INTERNAL=1
-//Result of TRY_COMPILE
-HAVE_GETADDRINFO_COMPILED:INTERNAL=TRUE
-//Have function getopt
-HAVE_GETOPT:INTERNAL=1
-//Have symbol gettimeofday
-HAVE_GETTIMEOFDAY:INTERNAL=1
-//Have includes HAVE_IEEEFP_H
-HAVE_IEEEFP_H:INTERNAL=
-//Have include HAVE_INTTYPES_H
-HAVE_INTTYPES_H:INTERNAL=1
-//Have function isascii
-HAVE_ISASCII:INTERNAL=1
-//Result of TRY_COMPILE
-HAVE_KWSYS_SIZEOF_CHAR:INTERNAL=TRUE
-//Result of TRY_COMPILE
-HAVE_KWSYS_SIZEOF_INT:INTERNAL=TRUE
-//Result of TRY_COMPILE
-HAVE_KWSYS_SIZEOF_LONG:INTERNAL=TRUE
-//Result of TRY_COMPILE
-HAVE_KWSYS_SIZEOF_LONG_LONG:INTERNAL=TRUE
-//Result of TRY_COMPILE
-HAVE_KWSYS_SIZEOF_SHORT:INTERNAL=TRUE
-//Result of TRY_COMPILE
-HAVE_KWSYS_SIZEOF___INT64:INTERNAL=FALSE
-//Have library dl;
-HAVE_LIBDL:INTERNAL=1
-//Have include HAVE_LIMITS_H
-HAVE_LIMITS_H:INTERNAL=1
-//Have symbol localtime
-HAVE_LOCALTIME:INTERNAL=1
-//Have includes HAVE_MALLOC_H
-HAVE_MALLOC_H:INTERNAL=
-//Have include HAVE_MATH_H
-HAVE_MATH_H:INTERNAL=1
-//Have function memmove
-HAVE_MEMMOVE:INTERNAL=1
-//Have include HAVE_MEMORY_H
-HAVE_MEMORY_H:INTERNAL=1
-//Have function memset
-HAVE_MEMSET:INTERNAL=1
-//Have function mmap
-HAVE_MMAP:INTERNAL=1
-//Have includes HAVE_NAN_H
-HAVE_NAN_H:INTERNAL=
-//Have includes HAVE_NDIR_H
-HAVE_NDIR_H:INTERNAL=
-//Have include HAVE_NETDB_H
-HAVE_NETDB_H:INTERNAL=1
-//Have include HAVE_NETINET_IN_H
-HAVE_NETINET_IN_H:INTERNAL=1
-//Have function pow
-HAVE_POW:INTERNAL=1
-//Have symbol printf
-HAVE_PRINTF:INTERNAL=1
-//Have include HAVE_PTHREAD_H
-HAVE_PTHREAD_H:INTERNAL=1
-//Have include HAVE_RESOLV_H
-HAVE_RESOLV_H:INTERNAL=1
-//Have library dld;dl
-HAVE_SHLLOAD:INTERNAL=
-//Have symbol signal
-HAVE_SIGNAL:INTERNAL=1
-//Have include HAVE_SIGNAL_H
-HAVE_SIGNAL_H:INTERNAL=1
-//Result of TRY_COMPILE
-HAVE_SIZEOF_DOUBLE:INTERNAL=TRUE
-//Result of TRY_COMPILE
-HAVE_SIZEOF_FLOAT:INTERNAL=TRUE
-//Result of TRY_COMPILE
-HAVE_SIZEOF_INT:INTERNAL=TRUE
-//Result of TRY_COMPILE
-HAVE_SIZEOF_LONG:INTERNAL=TRUE
-//Result of TRY_COMPILE
-HAVE_SIZEOF_OFF_T:INTERNAL=TRUE
-//Result of TRY_COMPILE
-HAVE_SIZEOF_PTRDIFF_T:INTERNAL=TRUE
-//Result of TRY_COMPILE
-HAVE_SIZEOF_SHORT:INTERNAL=TRUE
-//Result of TRY_COMPILE
-HAVE_SIZEOF_SIZE_T:INTERNAL=TRUE
-//Result of TRY_COMPILE
-HAVE_SIZEOF_SSIZE_T:INTERNAL=TRUE
-//Result of TRY_COMPILE
-HAVE_SIZEOF_UCHAR:INTERNAL=FALSE
-//Have symbol snprintf
-HAVE_SNPRINTF:INTERNAL=1
-//Result of TRY_COMPILE
-HAVE_SOCKLEN_T_COMPILED:INTERNAL=TRUE
-//Have symbol sprintf
-HAVE_SPRINTF:INTERNAL=1
-//Have function sqrt
-HAVE_SQRT:INTERNAL=1
-//Have symbol sscanf
-HAVE_SSCANF:INTERNAL=1
-//Have symbol stat
-HAVE_STAT:INTERNAL=1
-//Have include HAVE_STDARG_H
-HAVE_STDARG_H:INTERNAL=1
-//Have include stddef.h
-HAVE_STDDEF_H:INTERNAL=1
-//Have include stdint.h
-HAVE_STDINT_H:INTERNAL=1
-//Have include HAVE_STDIO_H
-HAVE_STDIO_H:INTERNAL=1
-//Have include HAVE_STDLIB_H
-HAVE_STDLIB_H:INTERNAL=1
-//Have function strcasecmp
-HAVE_STRCASECMP:INTERNAL=1
-//Have function strchr
-HAVE_STRCHR:INTERNAL=1
-//Have symbol strdup
-HAVE_STRDUP:INTERNAL=1
-//Have symbol strerror
-HAVE_STRERROR:INTERNAL=1
-//Have symbol strftime
-HAVE_STRFTIME:INTERNAL=1
-//Have include HAVE_STRINGS_H
-HAVE_STRINGS_H:INTERNAL=1
-//Have include HAVE_STRING_H
-HAVE_STRING_H:INTERNAL=1
-//Have symbol strndup
-HAVE_STRNDUP:INTERNAL=
-//Have function strrchr
-HAVE_STRRCHR:INTERNAL=1
-//Have function strstr
-HAVE_STRSTR:INTERNAL=1
-//Have function strtol
-HAVE_STRTOL:INTERNAL=1
-//Have function strtoul
-HAVE_STRTOUL:INTERNAL=
-//NetCDF test 
-HAVE_ST_BLKSIZE:INTERNAL=1
-//Result of TRY_COMPILE
-HAVE_SYS_DIR_H_COMPILED:INTERNAL=TRUE
-//Have include HAVE_SYS_MMAN_H
-HAVE_SYS_MMAN_H:INTERNAL=1
-//Result of TRY_COMPILE
-HAVE_SYS_NDIR_H_COMPILED:INTERNAL=FALSE
-//Have include HAVE_SYS_SELECT_H
-HAVE_SYS_SELECT_H:INTERNAL=1
-//Have include HAVE_SYS_SOCKET_H
-HAVE_SYS_SOCKET_H:INTERNAL=1
-//Have include HAVE_SYS_STAT_H
-HAVE_SYS_STAT_H:INTERNAL=1
-//Have include HAVE_SYS_TIMEB_H
-HAVE_SYS_TIMEB_H:INTERNAL=1
-//Have include HAVE_SYS_TIME_H
-HAVE_SYS_TIME_H:INTERNAL=1
-//Have include sys/types.h
-HAVE_SYS_TYPES_H:INTERNAL=1
-//Have include HAVE_TIME_H
-HAVE_TIME_H:INTERNAL=1
-//Have include HAVE_UNISTD_H
-HAVE_UNISTD_H:INTERNAL=1
-//Result of TRY_COMPILE
-HAVE_VA_COPY_COMPILED:INTERNAL=TRUE
-//Have symbol vfprintf
-HAVE_VFPRINTF:INTERNAL=1
-//Have symbol vsnprintf
-HAVE_VSNPRINTF:INTERNAL=1
-//Have symbol vsprintf
-HAVE_VSPRINTF:INTERNAL=1
-//Result of TRY_COMPILE
-HAVE_VTK_SIZEOF_LONG_LONG:INTERNAL=TRUE
-//Result of TRY_COMPILE
-HAVE_VTK_SIZEOF___INT64:INTERNAL=FALSE
-//Have includes HAVE_WINDOWS_H
-HAVE_WINDOWS_H:INTERNAL=
-//Have symbol _stat
-HAVE__STAT:INTERNAL=
-//Result of TRY_COMPILE
-HAVE___VA_COPY_COMPILED:INTERNAL=TRUE
-//ADVANCED property for variable: HGCOMMAND
-HGCOMMAND-ADVANCED:INTERNAL=1
-//Result of TRY_RUN
-KWSYS_CHAR_IS_SIGNED:INTERNAL=0
-//Result of TRY_COMPILE
-KWSYS_CHAR_IS_SIGNED_COMPILED:INTERNAL=TRUE
-//Result of TRY_COMPILE
-KWSYS_CXX_HAS_ARGUMENT_DEPENDENT_LOOKUP_COMPILED:INTERNAL=TRUE
-//Result of TRY_COMPILE
-KWSYS_CXX_HAS_CSTDDEF_COMPILED:INTERNAL=TRUE
-//Result of TRY_COMPILE
-KWSYS_CXX_HAS_FULL_SPECIALIZATION_COMPILED:INTERNAL=TRUE
-//Result of TRY_COMPILE
-KWSYS_CXX_HAS_MEMBER_TEMPLATES_COMPILED:INTERNAL=TRUE
-//Result of TRY_COMPILE
-KWSYS_CXX_HAS_NULL_TEMPLATE_ARGS_COMPILED:INTERNAL=FALSE
-//Result of TRY_COMPILE
-KWSYS_C_HAS_PTRDIFF_T_COMPILED:INTERNAL=TRUE
-//Result of TRY_COMPILE
-KWSYS_C_HAS_SSIZE_T_COMPILED:INTERNAL=TRUE
-//Result of TRY_COMPILE
-KWSYS_IOS_HAVE_STD_COMPILED:INTERNAL=TRUE
-//Result of TRY_COMPILE
-KWSYS_IOS_USE_ANSI_COMPILED:INTERNAL=TRUE
-//Result of TRY_COMPILE
-KWSYS_IOS_USE_SSTREAM_COMPILED:INTERNAL=TRUE
-//Result of TRY_RUN
-KWSYS_LFS_WORKS:INTERNAL=0
-//Result of TRY_COMPILE
-KWSYS_LFS_WORKS_COMPILED:INTERNAL=TRUE
-//Result of CHECK_TYPE_SIZE
-KWSYS_SIZEOF_CHAR:INTERNAL=1
-//Result of CHECK_TYPE_SIZE
-KWSYS_SIZEOF_INT:INTERNAL=4
-//Result of CHECK_TYPE_SIZE
-KWSYS_SIZEOF_LONG:INTERNAL=4
-//Result of CHECK_TYPE_SIZE
-KWSYS_SIZEOF_LONG_LONG:INTERNAL=8
-//Result of CHECK_TYPE_SIZE
-KWSYS_SIZEOF_SHORT:INTERNAL=2
-//Result of CHECK_TYPE_SIZE
-KWSYS_SIZEOF___INT64:INTERNAL=
-//Result of TRY_COMPILE
-KWSYS_STAT_HAS_ST_MTIM_COMPILED:INTERNAL=FALSE
-//Result of TRY_COMPILE
-KWSYS_STL_HAS_ALLOCATOR_MAX_SIZE_ARGUMENT_COMPILED:INTERNAL=FALSE
-//Result of TRY_COMPILE
-KWSYS_STL_HAS_ALLOCATOR_OBJECTS_COMPILED:INTERNAL=TRUE
-//Result of TRY_COMPILE
-KWSYS_STL_HAS_ALLOCATOR_REBIND_COMPILED:INTERNAL=TRUE
-//Result of TRY_COMPILE
-KWSYS_STL_HAS_ALLOCATOR_TEMPLATE_COMPILED:INTERNAL=TRUE
-//Result of TRY_COMPILE
-KWSYS_STL_HAS_ITERATOR_TRAITS_COMPILED:INTERNAL=TRUE
-//Result of TRY_COMPILE
-KWSYS_STL_HAVE_STD_COMPILED:INTERNAL=TRUE
-//Result of TRY_COMPILE
-KWSYS_STL_STRING_HAVE_NEQ_CHAR_COMPILED:INTERNAL=TRUE
-//Single output directory for building all libraries.
-LIBRARY_OUTPUT_PATH:INTERNAL=@BUILD_DIR@/VTK-build/bin
-//ADVANCED property for variable: MAKECOMMAND
-MAKECOMMAND-ADVANCED:INTERNAL=1
-//Path to vtkMaterialLibraryMacro.h
-MATERIAL_LIBRARY_MATERIAL_MACRO_HEADER:INTERNAL=@BUILD_DIR@/VTK-build/Utilities/MaterialLibrary/vtkMaterialLibraryMacro.h
-//Path to vtkShaderCodeLibraryMacro.h
-MATERIAL_LIBRARY_SHADER_MACRO_HEADER:INTERNAL=@BUILD_DIR@/VTK-build/Utilities/MaterialLibrary/vtkShaderCodeLibraryMacro.h
-//ADVANCED property for variable: MEMORYCHECK_COMMAND
-MEMORYCHECK_COMMAND-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: MEMORYCHECK_SUPPRESSIONS_FILE
-MEMORYCHECK_SUPPRESSIONS_FILE-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: OPENGL_INCLUDE_DIR
-OPENGL_INCLUDE_DIR-ADVANCED:INTERNAL=1
-//MODIFIED property for variable: OPENGL_INCLUDE_DIR
-OPENGL_INCLUDE_DIR-MODIFIED:INTERNAL=ON
-//ADVANCED property for variable: OPENGL_gl_LIBRARY
-OPENGL_gl_LIBRARY-ADVANCED:INTERNAL=1
-//MODIFIED property for variable: OPENGL_gl_LIBRARY
-OPENGL_gl_LIBRARY-MODIFIED:INTERNAL=ON
-//ADVANCED property for variable: OPENGL_glu_LIBRARY
-OPENGL_glu_LIBRARY-ADVANCED:INTERNAL=1
-//MODIFIED property for variable: OPENGL_glu_LIBRARY
-OPENGL_glu_LIBRARY-MODIFIED:INTERNAL=ON
-//ADVANCED property for variable: OPENGL_xmesa_INCLUDE_DIR
-OPENGL_xmesa_INCLUDE_DIR-ADVANCED:INTERNAL=1
-//Have symbol atanh
-PROJ_HAVE_ATANH:INTERNAL=1
-//Have symbol csin
-PROJ_HAVE_COMPLEX:INTERNAL=1
-//ADVANCED property for variable: PROJ_LIST_EXTERNAL
-PROJ_LIST_EXTERNAL-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: PROJ_USE_GSL
-PROJ_USE_GSL-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: PROJ_USE_PTHREADS
-PROJ_USE_PTHREADS-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: PYTHON_DEBUG_LIBRARY
-PYTHON_DEBUG_LIBRARY-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: PYTHON_ENABLE_MODULE_vtkCommonPython
-PYTHON_ENABLE_MODULE_vtkCommonPython-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: PYTHON_ENABLE_MODULE_vtkFilteringPython
-PYTHON_ENABLE_MODULE_vtkFilteringPython-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: PYTHON_ENABLE_MODULE_vtkGenericFilteringPython
-PYTHON_ENABLE_MODULE_vtkGenericFilteringPython-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: PYTHON_ENABLE_MODULE_vtkGeovisPython
-PYTHON_ENABLE_MODULE_vtkGeovisPython-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: PYTHON_ENABLE_MODULE_vtkGraphicsPython
-PYTHON_ENABLE_MODULE_vtkGraphicsPython-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: PYTHON_ENABLE_MODULE_vtkHybridPython
-PYTHON_ENABLE_MODULE_vtkHybridPython-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: PYTHON_ENABLE_MODULE_vtkIOPython
-PYTHON_ENABLE_MODULE_vtkIOPython-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: PYTHON_ENABLE_MODULE_vtkImagingPython
-PYTHON_ENABLE_MODULE_vtkImagingPython-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: PYTHON_ENABLE_MODULE_vtkInfovisPython
-PYTHON_ENABLE_MODULE_vtkInfovisPython-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: PYTHON_ENABLE_MODULE_vtkRenderingPython
-PYTHON_ENABLE_MODULE_vtkRenderingPython-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: PYTHON_ENABLE_MODULE_vtkViewsPython
-PYTHON_ENABLE_MODULE_vtkViewsPython-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: PYTHON_ENABLE_MODULE_vtkVolumeRenderingPython
-PYTHON_ENABLE_MODULE_vtkVolumeRenderingPython-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: PYTHON_ENABLE_MODULE_vtkWidgetsPython
-PYTHON_ENABLE_MODULE_vtkWidgetsPython-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: PYTHON_EXECUTABLE
-PYTHON_EXECUTABLE-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: PYTHON_EXTRA_LIBS
-PYTHON_EXTRA_LIBS-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: PYTHON_INCLUDE_PATH
-PYTHON_INCLUDE_PATH-ADVANCED:INTERNAL=1
-//MODIFIED property for variable: PYTHON_INCLUDE_PATH
-PYTHON_INCLUDE_PATH-MODIFIED:INTERNAL=ON
-//ADVANCED property for variable: PYTHON_LIBRARY
-PYTHON_LIBRARY-ADVANCED:INTERNAL=1
-//MODIFIED property for variable: PYTHON_LIBRARY
-PYTHON_LIBRARY-MODIFIED:INTERNAL=ON
-//ADVANCED property for variable: PYTHON_MODULE_vtkCommonPython_BUILD_SHARED
-PYTHON_MODULE_vtkCommonPython_BUILD_SHARED-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: PYTHON_MODULE_vtkFilteringPython_BUILD_SHARED
-PYTHON_MODULE_vtkFilteringPython_BUILD_SHARED-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: PYTHON_MODULE_vtkGenericFilteringPython_BUILD_SHARED
-PYTHON_MODULE_vtkGenericFilteringPython_BUILD_SHARED-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: PYTHON_MODULE_vtkGeovisPython_BUILD_SHARED
-PYTHON_MODULE_vtkGeovisPython_BUILD_SHARED-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: PYTHON_MODULE_vtkGraphicsPython_BUILD_SHARED
-PYTHON_MODULE_vtkGraphicsPython_BUILD_SHARED-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: PYTHON_MODULE_vtkHybridPython_BUILD_SHARED
-PYTHON_MODULE_vtkHybridPython_BUILD_SHARED-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: PYTHON_MODULE_vtkIOPython_BUILD_SHARED
-PYTHON_MODULE_vtkIOPython_BUILD_SHARED-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: PYTHON_MODULE_vtkImagingPython_BUILD_SHARED
-PYTHON_MODULE_vtkImagingPython_BUILD_SHARED-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: PYTHON_MODULE_vtkInfovisPython_BUILD_SHARED
-PYTHON_MODULE_vtkInfovisPython_BUILD_SHARED-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: PYTHON_MODULE_vtkRenderingPython_BUILD_SHARED
-PYTHON_MODULE_vtkRenderingPython_BUILD_SHARED-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: PYTHON_MODULE_vtkViewsPython_BUILD_SHARED
-PYTHON_MODULE_vtkViewsPython_BUILD_SHARED-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: PYTHON_MODULE_vtkVolumeRenderingPython_BUILD_SHARED
-PYTHON_MODULE_vtkVolumeRenderingPython_BUILD_SHARED-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: PYTHON_MODULE_vtkWidgetsPython_BUILD_SHARED
-PYTHON_MODULE_vtkWidgetsPython_BUILD_SHARED-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: PYTHON_UTIL_LIBRARY
-PYTHON_UTIL_LIBRARY-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: SCPCOMMAND
-SCPCOMMAND-ADVANCED:INTERNAL=1
-//Result of TRY_COMPILE
-SHARED_LIBRARY_PATH_INFO_COMPILED:INTERNAL=TRUE
-//Result of TRY_RUN
-SHARED_LIBRARY_PATH_TYPE:INTERNAL=0
-//runtime library path variable name.
-SHARED_LIBRARY_PATH_VAR_NAME:INTERNAL=DYLD_LIBRARY_PATH
-//ADVANCED property for variable: SITE
-SITE-ADVANCED:INTERNAL=1
-//Result of CHECK_TYPE_SIZE
-SIZEOF_DOUBLE:INTERNAL=8
-//Result of CHECK_TYPE_SIZE
-SIZEOF_FLOAT:INTERNAL=4
-//Result of CHECK_TYPE_SIZE
-SIZEOF_INT:INTERNAL=4
-//Result of CHECK_TYPE_SIZE
-SIZEOF_LONG:INTERNAL=4
-//Result of CHECK_TYPE_SIZE
-SIZEOF_OFF_T:INTERNAL=8
-//Result of CHECK_TYPE_SIZE
-SIZEOF_PTRDIFF_T:INTERNAL=4
-//Result of CHECK_TYPE_SIZE
-SIZEOF_SHORT:INTERNAL=2
-//Result of CHECK_TYPE_SIZE
-SIZEOF_SIZE_T:INTERNAL=4
-//Result of CHECK_TYPE_SIZE
-SIZEOF_SSIZE_T:INTERNAL=4
-//Result of CHECK_TYPE_SIZE
-SIZEOF_UCHAR:INTERNAL=
-//ADVANCED property for variable: SLURM_SBATCH_COMMAND
-SLURM_SBATCH_COMMAND-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: SLURM_SRUN_COMMAND
-SLURM_SRUN_COMMAND-ADVANCED:INTERNAL=1
-//Have include STDC_HEADERS
-STDC_HEADERS:INTERNAL=1
-//Result of TRY_COMPILE
-SUPPORT_IP6_COMPILED:INTERNAL=TRUE
-//ADVANCED property for variable: SVNCOMMAND
-SVNCOMMAND-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: TCL_INCLUDE_PATH
-TCL_INCLUDE_PATH-ADVANCED:INTERNAL=1
-//MODIFIED property for variable: TCL_INCLUDE_PATH
-TCL_INCLUDE_PATH-MODIFIED:INTERNAL=ON
-//ADVANCED property for variable: TCL_LIBRARY
-TCL_LIBRARY-ADVANCED:INTERNAL=1
-//MODIFIED property for variable: TCL_LIBRARY
-TCL_LIBRARY-MODIFIED:INTERNAL=ON
-//ADVANCED property for variable: TCL_TCLSH
-TCL_TCLSH-ADVANCED:INTERNAL=1
-//MODIFIED property for variable: TCL_TCLSH
-TCL_TCLSH-MODIFIED:INTERNAL=ON
-//ADVANCED property for variable: TK_INCLUDE_PATH
-TK_INCLUDE_PATH-ADVANCED:INTERNAL=1
-//MODIFIED property for variable: TK_INCLUDE_PATH
-TK_INCLUDE_PATH-MODIFIED:INTERNAL=ON
-//ADVANCED property for variable: TK_INTERNAL_PATH
-TK_INTERNAL_PATH-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: TK_LIBRARY
-TK_LIBRARY-ADVANCED:INTERNAL=1
-//MODIFIED property for variable: TK_LIBRARY
-TK_LIBRARY-MODIFIED:INTERNAL=ON
-//ADVANCED property for variable: TK_WISH
-TK_WISH-ADVANCED:INTERNAL=1
-//This value is not used by VTK.
-TK_WISH:INTERNAL=/usr/bin/wish
-//ADVANCED property for variable: VERDICT_BUILD_DOC
-VERDICT_BUILD_DOC-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: VERDICT_ENABLE_TESTING
-VERDICT_ENABLE_TESTING-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: VERDICT_MANGLE
-VERDICT_MANGLE-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: VERDICT_MANGLE_PREFIX
-VERDICT_MANGLE_PREFIX-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: VERDICT_USE_FLOAT
-VERDICT_USE_FLOAT-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: VLI_LIBRARY_FOR_VP1000
-VLI_LIBRARY_FOR_VP1000-ADVANCED:INTERNAL=1
-//Result of TRY_COMPILE
-VTK_ANSI_STREAM_EOF_COMPILED:INTERNAL=TRUE
-//Result of TRY_RUN
-VTK_ANSI_STREAM_EOF_RESULT:INTERNAL=0
-//Support for C++ type bool
-VTK_COMPILER_HAS_BOOL:INTERNAL=1
-//Support for full template specialization syntax
-VTK_COMPILER_HAS_FULL_SPECIALIZATION:INTERNAL=1
-//ADVANCED property for variable: VTK_DEBUG_LEAKS
-VTK_DEBUG_LEAKS-ADVANCED:INTERNAL=1
-//The directory in which code for Shaders is provided.
-VTK_DEFAULT_SHADERS_DIR:INTERNAL=@BUILD_DIR@/VTK-build/Utilities/MaterialLibrary/Repository
-//String encoder.
-VTK_ENCODESTRING_EXE:INTERNAL=@BUILD_DIR@/VTK-build/bin/vtkEncodeString
-//Support for C++ explicit templates
-VTK_EXPLICIT_TEMPLATES:INTERNAL=1
-//ADVANCED property for variable: VTK_GLEXT_FILE
-VTK_GLEXT_FILE-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: VTK_GLXEXT_FILE
-VTK_GLXEXT_FILE-ADVANCED:INTERNAL=1
-//Have include iosfwd
-VTK_HAVE_ANSI_STREAMS:INTERNAL=1
-//Support for getsockname with socklen_t
-VTK_HAVE_GETSOCKNAME_WITH_SOCKLEN_T:INTERNAL=1
-//Have library socket
-VTK_HAVE_LIBSOCKET:INTERNAL=
-//Have include iostream.h
-VTK_HAVE_OLD_STREAMS:INTERNAL=1
-//Have include strstream.h
-VTK_HAVE_OLD_STRSTREAM_H:INTERNAL=
-//Have include strstrea.h
-VTK_HAVE_OLD_STRSTREA_H:INTERNAL=
-//Have symbol SO_REUSEADDR
-VTK_HAVE_SO_REUSEADDR:INTERNAL=1
-//Whether istream supports long long
-VTK_ISTREAM_SUPPORTS_LONG_LONG:INTERNAL=1
-//ADVANCED property for variable: VTK_LEGACY_REMOVE
-VTK_LEGACY_REMOVE-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: VTK_LEGACY_SILENT
-VTK_LEGACY_SILENT-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: VTK_MATERIALS_DIRS
-VTK_MATERIALS_DIRS-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: VTK_NO_PYTHON_THREADS
-VTK_NO_PYTHON_THREADS-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: VTK_OPENGL_HAS_OSMESA
-VTK_OPENGL_HAS_OSMESA-ADVANCED:INTERNAL=1
-//Whether ostream supports long long
-VTK_OSTREAM_SUPPORTS_LONG_LONG:INTERNAL=1
-//OpenGL extensions parser.
-VTK_PARSEOGLEXT_EXE:INTERNAL=@BUILD_DIR@/VTK-build/bin/vtkParseOGLExt
-//Install directory for Python .py and .pyc files
-VTK_PYTHON_MODULE_INSTALL_DIR:INTERNAL=${CMAKE_INSTALL_PREFIX}/lib/python@PYVER@/site-packages
-//Install directory for Python binary modules
-VTK_PYTHON_MODULE_PLATFORM_INSTALL_DIR:INTERNAL=${CMAKE_INSTALL_PREFIX}/lib/python@PYVER@/site-packages
-//ADVANCED property for variable: VTK_PYTHON_SETUP_ARGS
-VTK_PYTHON_SETUP_ARGS-ADVANCED:INTERNAL=1
-//Result of CHECK_TYPE_SIZE
-VTK_SIZEOF_LONG_LONG:INTERNAL=8
-//Result of CHECK_TYPE_SIZE
-VTK_SIZEOF___INT64:INTERNAL=
-//Very few users should worry about this option. If VTK is built
-// against a static Tcl/Tk lib (see VTK_TCL_TK_STATIC) or a shared
-// Tcl/Tk bundled inside a project with no library support files
-// (ex: ParaViewComplete), this variable should be set to ON and
-// both VTK_TCL_SUPPORT_LIBRARY_PATH and VTK_TK_SUPPORT_LIBRARY_PATH
-// should point to the directories that hold those files (typically,
-// lib/tcl8.4 and lib/tk8.4 for a typical Tcl/Tk installation,
-// or tcl8.4.5/library and tk8.4.5/library for a Tcl/Tk source
-// repository). Once this variable is set to ON, support files
-// will automatically be copied to the build directory and the
-// executables will try to use that location to initialize Tcl/Tk.
-VTK_TCL_TK_COPY_SUPPORT_LIBRARY:INTERNAL=
-//ADVANCED property for variable: VTK_TESTING_USE_FPE
-VTK_TESTING_USE_FPE-ADVANCED:INTERNAL=1
-//Whether char is signed.
-VTK_TYPE_CHAR_IS_SIGNED:INTERNAL=1
-//Result of TRY_COMPILE
-VTK_TYPE_CHAR_IS_SIGNED_COMPILED:INTERNAL=TRUE
-//ADVANCED property for variable: VTK_USE_64BIT_IDS
-VTK_USE_64BIT_IDS-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: VTK_USE_BOOST
-VTK_USE_BOOST-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: VTK_USE_CG_SHADERS
-VTK_USE_CG_SHADERS-ADVANCED:INTERNAL=1
-//MODIFIED property for variable: VTK_USE_COCOA
-VTK_USE_COCOA-MODIFIED:INTERNAL=ON
-//ADVANCED property for variable: VTK_USE_DISPLAY
-VTK_USE_DISPLAY-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: VTK_USE_FFMPEG_ENCODER
-VTK_USE_FFMPEG_ENCODER-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: VTK_USE_GL2PS
-VTK_USE_GL2PS-ADVANCED:INTERNAL=1
-//MODIFIED property for variable: VTK_USE_GL2PS
-VTK_USE_GL2PS-MODIFIED:INTERNAL=ON
-//ADVANCED property for variable: VTK_USE_GLSL_SHADERS
-VTK_USE_GLSL_SHADERS-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: VTK_USE_GUISUPPORT
-VTK_USE_GUISUPPORT-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: VTK_USE_MANGLED_MESA
-VTK_USE_MANGLED_MESA-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: VTK_USE_METAIO
-VTK_USE_METAIO-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: VTK_USE_MPEG2_ENCODER
-VTK_USE_MPEG2_ENCODER-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: VTK_USE_MYSQL
-VTK_USE_MYSQL-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: VTK_USE_ODBC
-VTK_USE_ODBC-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: VTK_USE_POSTGRES
-VTK_USE_POSTGRES-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: VTK_USE_RPATH
-VTK_USE_RPATH-ADVANCED:INTERNAL=1
-//MODIFIED property for variable: VTK_USE_RPATH
-VTK_USE_RPATH-MODIFIED:INTERNAL=ON
-//ADVANCED property for variable: VTK_USE_SYSTEM_EXPAT
-VTK_USE_SYSTEM_EXPAT-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: VTK_USE_SYSTEM_FREETYPE
-VTK_USE_SYSTEM_FREETYPE-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: VTK_USE_SYSTEM_JPEG
-VTK_USE_SYSTEM_JPEG-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: VTK_USE_SYSTEM_LIBPROJ4
-VTK_USE_SYSTEM_LIBPROJ4-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: VTK_USE_SYSTEM_LIBXML2
-VTK_USE_SYSTEM_LIBXML2-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: VTK_USE_SYSTEM_PNG
-VTK_USE_SYSTEM_PNG-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: VTK_USE_SYSTEM_TIFF
-VTK_USE_SYSTEM_TIFF-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: VTK_USE_SYSTEM_ZLIB
-VTK_USE_SYSTEM_ZLIB-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: VTK_USE_TK
-VTK_USE_TK-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: VTK_USE_VOLUMEPRO_1000
-VTK_USE_VOLUMEPRO_1000-ADVANCED:INTERNAL=1
-//MODIFIED property for variable: VTK_USE_X
-VTK_USE_X-MODIFIED:INTERNAL=ON
-//ADVANCED property for variable: VTK_WGLEXT_FILE
-VTK_WGLEXT_FILE-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: VTK_WRAP_HINTS
-VTK_WRAP_HINTS-ADVANCED:INTERNAL=1
-//MODIFIED property for variable: VTK_WRAP_PYTHON
-VTK_WRAP_PYTHON-MODIFIED:INTERNAL=ON
-//Location of program to do Python wrapping
-VTK_WRAP_PYTHON_EXE:INTERNAL=@BUILD_DIR@/VTK-build/bin/vtkWrapPython
-//Location of program to do Python wrapping
-VTK_WRAP_PYTHON_INIT_EXE:INTERNAL=@BUILD_DIR@/VTK-build/bin/vtkWrapPythonInit
-//ADVANCED property for variable: X11_ICE_INCLUDE_PATH
-X11_ICE_INCLUDE_PATH-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: X11_ICE_LIB
-X11_ICE_LIB-ADVANCED:INTERNAL=1
-//Have library /usr/X11R6/lib/libX11.dylib;/usr/X11R6/lib/libXext.dylib
-X11_LIB_X11_SOLO:INTERNAL=1
-//ADVANCED property for variable: X11_SM_LIB
-X11_SM_LIB-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: X11_X11_INCLUDE_PATH
-X11_X11_INCLUDE_PATH-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: X11_X11_LIB
-X11_X11_LIB-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: X11_XShm_INCLUDE_PATH
-X11_XShm_INCLUDE_PATH-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: X11_XTest_INCLUDE_PATH
-X11_XTest_INCLUDE_PATH-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: X11_XTest_LIB
-X11_XTest_LIB-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: X11_Xaccessrules_INCLUDE_PATH
-X11_Xaccessrules_INCLUDE_PATH-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: X11_Xaccessstr_INCLUDE_PATH
-X11_Xaccessstr_INCLUDE_PATH-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: X11_Xau_INCLUDE_PATH
-X11_Xau_INCLUDE_PATH-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: X11_Xau_LIB
-X11_Xau_LIB-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: X11_Xcomposite_INCLUDE_PATH
-X11_Xcomposite_INCLUDE_PATH-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: X11_Xcomposite_LIB
-X11_Xcomposite_LIB-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: X11_Xcursor_INCLUDE_PATH
-X11_Xcursor_INCLUDE_PATH-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: X11_Xcursor_LIB
-X11_Xcursor_LIB-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: X11_Xdamage_INCLUDE_PATH
-X11_Xdamage_INCLUDE_PATH-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: X11_Xdamage_LIB
-X11_Xdamage_LIB-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: X11_Xdmcp_INCLUDE_PATH
-X11_Xdmcp_INCLUDE_PATH-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: X11_Xdmcp_LIB
-X11_Xdmcp_LIB-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: X11_Xext_LIB
-X11_Xext_LIB-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: X11_Xfixes_INCLUDE_PATH
-X11_Xfixes_INCLUDE_PATH-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: X11_Xfixes_LIB
-X11_Xfixes_LIB-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: X11_Xft_INCLUDE_PATH
-X11_Xft_INCLUDE_PATH-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: X11_Xft_LIB
-X11_Xft_LIB-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: X11_Xinerama_INCLUDE_PATH
-X11_Xinerama_INCLUDE_PATH-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: X11_Xinerama_LIB
-X11_Xinerama_LIB-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: X11_Xinput_INCLUDE_PATH
-X11_Xinput_INCLUDE_PATH-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: X11_Xinput_LIB
-X11_Xinput_LIB-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: X11_Xkb_INCLUDE_PATH
-X11_Xkb_INCLUDE_PATH-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: X11_Xkblib_INCLUDE_PATH
-X11_Xkblib_INCLUDE_PATH-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: X11_Xlib_INCLUDE_PATH
-X11_Xlib_INCLUDE_PATH-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: X11_Xpm_INCLUDE_PATH
-X11_Xpm_INCLUDE_PATH-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: X11_Xpm_LIB
-X11_Xpm_LIB-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: X11_Xrandr_INCLUDE_PATH
-X11_Xrandr_INCLUDE_PATH-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: X11_Xrandr_LIB
-X11_Xrandr_LIB-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: X11_Xrender_INCLUDE_PATH
-X11_Xrender_INCLUDE_PATH-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: X11_Xrender_LIB
-X11_Xrender_LIB-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: X11_Xscreensaver_INCLUDE_PATH
-X11_Xscreensaver_INCLUDE_PATH-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: X11_Xscreensaver_LIB
-X11_Xscreensaver_LIB-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: X11_Xshape_INCLUDE_PATH
-X11_Xshape_INCLUDE_PATH-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: X11_Xt_INCLUDE_PATH
-X11_Xt_INCLUDE_PATH-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: X11_Xt_LIB
-X11_Xt_LIB-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: X11_Xutil_INCLUDE_PATH
-X11_Xutil_INCLUDE_PATH-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: X11_Xv_INCLUDE_PATH
-X11_Xv_INCLUDE_PATH-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: X11_Xv_LIB
-X11_Xv_LIB-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: X11_Xxf86misc_LIB
-X11_Xxf86misc_LIB-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: X11_dpms_INCLUDE_PATH
-X11_dpms_INCLUDE_PATH-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: X11_fontconfig_LIB
-X11_fontconfig_LIB-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: X11_xf86misc_INCLUDE_PATH
-X11_xf86misc_INCLUDE_PATH-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: X11_xf86vmode_INCLUDE_PATH
-X11_xf86vmode_INCLUDE_PATH-ADVANCED:INTERNAL=1
-//Already complained about update type.
-__CTEST_UPDATE_TYPE_COMPLAINED:INTERNAL=1
-
diff --git a/exsrc/src/cmake/multi.c b/exsrc/src/cmake/multi.c
deleted file mode 100644
index e4edb4d4624363c25e2c548fb75e04b82f33084f..0000000000000000000000000000000000000000
--- a/exsrc/src/cmake/multi.c
+++ /dev/null
@@ -1,1978 +0,0 @@
-/***************************************************************************
- *                                  _   _ ____  _
- *  Project                     ___| | | |  _ \| |
- *                             / __| | | | |_) | |
- *                            | (__| |_| |  _ <| |___
- *                             \___|\___/|_| \_\_____|
- *
- * Copyright (C) 1998 - 2007, Daniel Stenberg, <daniel@haxx.se>, et al.
- *
- * This software is licensed as described in the file COPYING, which
- * you should have received as part of this distribution. The terms
- * are also available at http://curl.haxx.se/docs/copyright.html.
- *
- * You may opt to use, copy, modify, merge, publish, distribute and/or sell
- * copies of the Software, and permit persons to whom the Software is
- * furnished to do so, under the terms of the COPYING file.
- *
- * This software is distributed on an "AS IS" basis, WITHOUT WARRANTY OF ANY
- * KIND, either express or implied.
- *
- * $Id: multi.c,v 1.2 2007-03-15 19:22:13 andy Exp $
- ***************************************************************************/
-
-#include "setup.h"
-#include <stdlib.h>
-#include <string.h>
-
-#ifdef HAVE_SYS_TYPES_H
-#include <sys/types.h>
-#endif
-#ifdef HAVE_SYS_SOCKET_H
-#include <sys/socket.h>
-#endif
-#ifdef HAVE_UNISTD_H
-#include <unistd.h>
-#endif
-
-#include <curl/curl.h>
-
-#include "urldata.h"
-#include "transfer.h"
-#include "url.h"
-#include "connect.h"
-#include "progress.h"
-#include "memory.h"
-#include "easyif.h"
-#include "multiif.h"
-#include "sendf.h"
-#include "timeval.h"
-
-/* The last #include file should be: */
-#include "memdebug.h"
-
-struct Curl_message {
-  /* the 'CURLMsg' is the part that is visible to the external user */
-  struct CURLMsg extmsg;
-  struct Curl_message *next;
-};
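/* [Editor's note: illustrative sketch, not part of the original file.]
   'extmsg' being the FIRST member is what makes this struct work: C
   guarantees that a pointer to a struct and a pointer to its first member
   compare equal, so the library can expose &msg->extmsg as a CURLMsg* and
   cast it back to Curl_message* internally. A minimal stand-alone demo of
   the same embedding idiom, with hypothetical names: */
struct demo_public  { int code; };      /* the part the user sees */
struct demo_private {
  struct demo_public ext;               /* must remain the first member */
  struct demo_private *next;            /* hidden list linkage */
};
/* (struct demo_public *)p == &p->ext for any struct demo_private *p */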
-
-typedef enum {
-  CURLM_STATE_INIT,        /* start in this state */
-  CURLM_STATE_CONNECT,     /* resolve/connect has been sent off */
-  CURLM_STATE_WAITRESOLVE, /* awaiting the resolve to finalize */
-  CURLM_STATE_WAITCONNECT, /* awaiting the connect to finalize */
-  CURLM_STATE_PROTOCONNECT, /* completing the protocol-specific connect
-                               phase */
-  CURLM_STATE_WAITDO,      /* wait for our turn to send the request */
-  CURLM_STATE_DO,          /* start send off the request (part 1) */
-  CURLM_STATE_DOING,       /* sending off the request (part 1) */
-  CURLM_STATE_DO_MORE,     /* send off the request (part 2) */
-  CURLM_STATE_DO_DONE,     /* done sending off request */
-  CURLM_STATE_WAITPERFORM, /* wait for our turn to read the response */
-  CURLM_STATE_PERFORM,     /* transfer data */
-  CURLM_STATE_TOOFAST,     /* wait because limit-rate exceeded */
-  CURLM_STATE_DONE,        /* post data transfer operation */
-  CURLM_STATE_COMPLETED,   /* operation complete */
-  CURLM_STATE_CANCELLED,   /* cancelled */
-
-  CURLM_STATE_LAST /* not a true state, never use this */
-} CURLMstate;
-
-/* we support N sockets per easy handle. Set the corresponding bit to what
-   action we should wait for */
-#define MAX_SOCKSPEREASYHANDLE 5
-#define GETSOCK_READABLE (0x00ff)
-#define GETSOCK_WRITABLE (0xff00)
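/* [Editor's note: illustrative sketch, not from the original file.] The two
   masks split a 16-bit action bitmap into a low "read" byte and a high
   "write" byte, one bit per socket slot (up to MAX_SOCKSPEREASYHANDLE).
   Hypothetical helpers showing how such a bitmap would be decoded: */
static int demo_wants_read(int bitmap, int slot)
{
  return (bitmap & GETSOCK_READABLE) & (1 << slot);
}
static int demo_wants_write(int bitmap, int slot)
{
  return (bitmap & GETSOCK_WRITABLE) & (1 << (slot + 8));
}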
-
-struct closure {
-  struct closure *next; /* a simple one-way list of structs */
-  struct SessionHandle *easy_handle;
-};
-
-struct Curl_one_easy {
-  /* first, two fields for the linked list of these */
-  struct Curl_one_easy *next;
-  struct Curl_one_easy *prev;
-
-  struct SessionHandle *easy_handle; /* the easy handle for this unit */
-  struct connectdata *easy_conn;     /* the "unit's" connection */
-
-  CURLMstate state;  /* the handle's state */
-  CURLcode result;   /* previous result */
-
-  struct Curl_message *msg; /* A pointer to one single posted message.
-                               Cleanup should be done on this pointer NOT on
-                               the linked list in Curl_multi.  This message
-                               will be deleted when this handle is removed
-                               from the multi-handle */
-  int msg_num; /* number of messages left in 'msg' to return */
-
-  /* Array with the plain socket numbers this handle takes care of, in no
-     particular order. Note that all sockets are added to the sockhash, where
-     the state etc are also kept. This array is mostly used to detect when a
-     socket is to be removed from the hash. See singlesocket(). */
-  curl_socket_t sockets[MAX_SOCKSPEREASYHANDLE];
-  int numsocks;
-};
-
-#define CURL_MULTI_HANDLE 0x000bab1e
-
-#define GOOD_MULTI_HANDLE(x) \
-  ((x)&&(((struct Curl_multi *)x)->type == CURL_MULTI_HANDLE))
-#define GOOD_EASY_HANDLE(x) \
- (((struct SessionHandle *)x)->magic == CURLEASY_MAGIC_NUMBER)
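/* [Editor's note: illustrative sketch, not part of the original file.] Both
   macros implement runtime type-tagging: each handle carries a magic number
   in a known field, so an API entry point can cheaply reject a pointer of
   the wrong kind (e.g. an easy handle passed where a multi handle belongs).
   The same pattern in miniature, with hypothetical names: */
#define DEMO_MAGIC 0x000bab1eL

struct demo_handle {
  long magic;                 /* tag checked on every API entry */
};

static int demo_handle_ok(struct demo_handle *h)
{
  return h != NULL && h->magic == DEMO_MAGIC;
}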
-
-/* This is the struct known as CURLM on the outside */
-struct Curl_multi {
-  /* First, a simple identifier to make it easier to detect if a user mixes
-     up this multi handle with an easy handle. Set this to CURL_MULTI_HANDLE. */
-  long type;
-
-  /* We have a linked list with easy handles */
-  struct Curl_one_easy easy;
-
-  int num_easy; /* amount of entries in the linked list above. */
-  int num_msgs; /* amount of messages in the easy handles */
-  int num_alive; /* amount of easy handles that are added but have not yet
-                    reached COMPLETE state */
-
-  /* callback function and user data pointer for the *socket() API */
-  curl_socket_callback socket_cb;
-  void *socket_userp;
-
-  /* Hostname cache */
-  struct curl_hash *hostcache;
-
-  /* timetree points to the splay-tree of time nodes to figure out expire
-     times of all currently set timers */
-  struct Curl_tree *timetree;
-
-  /* 'sockhash' is the lookup hash for socket descriptor => easy handles (note
-     the plural form; there can be more than one easy handle waiting on the
-     same actual socket) */
-  struct curl_hash *sockhash;
-
-  /* Whether pipelining is enabled for this multi handle */
-  bool pipelining_enabled;
-
-  /* shared connection cache */
-  struct conncache *connc;
-
-  /* list of easy handles kept around for doing nice connection closures */
-  struct closure *closure;
-
-  /* timer callback and user data pointer for the *socket() API */
-  curl_multi_timer_callback timer_cb;
-  void *timer_userp;
-  time_t timer_lastcall; /* the fixed time for the timeout for the previous
-                            callback */
-};
-
-static bool multi_conn_using(struct Curl_multi *multi,
-                             struct SessionHandle *data);
-static void singlesocket(struct Curl_multi *multi,
-                         struct Curl_one_easy *easy);
-static void add_closure(struct Curl_multi *multi,
-                        struct SessionHandle *data);
-static int update_timer(struct Curl_multi *multi);
-
-#ifdef CURLDEBUG
-static const char *statename[]={
-  "INIT",
-  "CONNECT",
-  "WAITRESOLVE",
-  "WAITCONNECT",
-  "PROTOCONNECT",
-  "WAITDO",
-  "DO",
-  "DOING",
-  "DO_MORE",
-  "DO_DONE",
-  "WAITPERFORM",
-  "PERFORM",
-  "TOOFAST",
-  "DONE",
-  "COMPLETED",
-  "CANCELLED"
-};
-
-void curl_multi_dump(CURLM *multi_handle);
-#endif
-
-/* always use this function to change state, to make debugging easier */
-static void multistate(struct Curl_one_easy *easy, CURLMstate state)
-{
-#ifdef CURLDEBUG
-  long index = -1;
-#endif
-  CURLMstate oldstate = easy->state;
-
-  if(oldstate == state)
-    /* don't bother when the new state is the same as the old state */
-    return;
-
-  easy->state = state;
-
-#ifdef CURLDEBUG
-  if(easy->state > CURLM_STATE_CONNECT &&
-     easy->state < CURLM_STATE_COMPLETED)
-    index = easy->easy_conn->connectindex;
-
-  infof(easy->easy_handle,
-        "STATE: %s => %s handle %p; (connection #%ld) \n",
-        statename[oldstate], statename[easy->state],
-        (char *)easy, index);
-#endif
-  if(state == CURLM_STATE_COMPLETED)
-    /* changing to COMPLETED means there's one less easy handle 'alive' */
-    easy->easy_handle->multi->num_alive--;
-}
-
-/*
- * We add one of these structs to the sockhash for a particular socket
- */
-
-struct Curl_sh_entry {
-  struct SessionHandle *easy;
-  time_t timestamp;
-  long inuse;
-  int action;  /* what action READ/WRITE this socket waits for */
-  curl_socket_t socket; /* mainly to ease debugging */
-  void *socketp; /* settable by users with curl_multi_assign() */
-};
-/* bits for 'action': having no bits set means this socket is not expecting
-   any action */
-#define SH_READ  1
-#define SH_WRITE 2
-
-/* make sure this socket is present in the hash for this handle */
-static struct Curl_sh_entry *sh_addentry(struct curl_hash *sh,
-                                         curl_socket_t s,
-                                         struct SessionHandle *data)
-{
-  struct Curl_sh_entry *there =
-    Curl_hash_pick(sh, (char *)&s, sizeof(curl_socket_t));
-  struct Curl_sh_entry *check;
-
-  if(there)
-    /* it is present, return fine */
-    return there;
-
-  /* not present, add it */
-  check = calloc(sizeof(struct Curl_sh_entry), 1);
-  if(!check)
-    return NULL; /* major failure */
-  check->easy = data;
-  check->socket = s;
-
-  /* make/add new hash entry */
-  if(NULL == Curl_hash_add(sh, (char *)&s, sizeof(curl_socket_t), check)) {
-    free(check);
-    return NULL; /* major failure */
-  }
-
-  return check; /* things are good in sockhash land */
-}
-
-
-/* delete the given socket + handle from the hash */
-static void sh_delentry(struct curl_hash *sh, curl_socket_t s)
-{
-  struct Curl_sh_entry *there =
-    Curl_hash_pick(sh, (char *)&s, sizeof(curl_socket_t));
-
-  if(there) {
-    /* this socket is in the hash */
-    /* We remove the hash entry. (This'll end up in a call to
-       sh_freeentry().) */
-    Curl_hash_delete(sh, (char *)&s, sizeof(curl_socket_t));
-  }
-}
-
-/*
- * free a sockhash entry
- */
-static void sh_freeentry(void *freethis)
-{
-  struct Curl_sh_entry *p = (struct Curl_sh_entry *) freethis;
-
-  free(p);
-}
-
-/*
- * sh_init() creates a new socket hash and returns the handle for it.
- *
- * Quote from README.multi_socket:
- *
- * "Some tests at 7000 and 9000 connections showed that the socket hash lookup
- * is somewhat of a bottle neck. Its current implementation may be a bit too
- * limiting. It simply has a fixed-size array, and on each entry in the array
- * it has a linked list with entries. So the hash only checks which list to
- * scan through. The code I had used so far used a list with merely 7 slots
- * (as that is what the DNS hash uses) but with 7000 connections that would
- * make an average of 1000 nodes in each list to run through. I upped that to
- * 97 slots (I believe a prime is suitable) and noticed a significant speed
- * increase.  I need to reconsider the hash implementation or use a rather
- * large default value like this. At 9000 connections I was still below 10us
- * per call."
- *
- */
-static struct curl_hash *sh_init(void)
-{
-  return Curl_hash_alloc(97, sh_freeentry);
-}
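/* [Editor's note: illustrative sketch, not curl's curl_hash.] The quoted
   README passage describes a fixed-bucket chained hash: with B buckets and
   N entries a lookup scans about N/B nodes, which is why growing from 7 to
   97 buckets cuts the average chain from roughly 1000 to about 72 nodes at
   7000 connections. A minimal version of that shape: */
#define DEMO_BUCKETS 97            /* a prime, as the note suggests */

struct demo_node { int key; void *value; struct demo_node *next; };

struct demo_hash { struct demo_node *slot[DEMO_BUCKETS]; };

static void *demo_pick(struct demo_hash *h, int key)
{
  struct demo_node *n;
  for(n = h->slot[(unsigned)key % DEMO_BUCKETS]; n; n = n->next)
    if(n->key == key)
      return n->value;             /* found in this bucket's chain */
  return NULL;
}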
-
-CURLM *curl_multi_init(void)
-{
-  struct Curl_multi *multi = (void *)calloc(sizeof(struct Curl_multi), 1);
-
-  if(!multi)
-    return NULL;
-
-  multi->type = CURL_MULTI_HANDLE;
-
-  multi->hostcache = Curl_mk_dnscache();
-  if(!multi->hostcache) {
-    /* failure, free mem and bail out */
-    free(multi);
-    return NULL;
-  }
-
-  multi->sockhash = sh_init();
-  if(!multi->sockhash) {
-    /* failure, free mem and bail out */
-    Curl_hash_destroy(multi->hostcache);
-    free(multi);
-    return NULL;
-  }
-
-  multi->connc = Curl_mk_connc(CONNCACHE_MULTI, -1);
-  if(!multi->connc) {
-    Curl_hash_destroy(multi->hostcache);
-    free(multi);
-    return NULL;
-  }
-
-  return (CURLM *) multi;
-}
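/* [Editor's note: usage sketch, not part of the original file.] From the
   application side, the handle built above is driven through the public
   multi API; every call below is a long-standing libcurl entry point.
   Error checks are omitted, and a real program would wait on
   curl_multi_fdset()/select() between curl_multi_perform() calls instead
   of spinning: */
int demo_fetch(const char *url)
{
  CURL  *easy  = curl_easy_init();
  CURLM *multi = curl_multi_init();
  int running = 0;

  curl_easy_setopt(easy, CURLOPT_URL, url);
  curl_multi_add_handle(multi, easy);    /* handle starts in CURLM_STATE_INIT */

  do {
    curl_multi_perform(multi, &running); /* steps the state machine */
  } while(running);

  curl_multi_remove_handle(multi, easy);
  curl_easy_cleanup(easy);
  curl_multi_cleanup(multi);
  return 0;
}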
-
-CURLMcode curl_multi_add_handle(CURLM *multi_handle,
-                                CURL *easy_handle)
-{
-  struct Curl_multi *multi=(struct Curl_multi *)multi_handle;
-  struct Curl_one_easy *easy;
-  struct closure *cl;
-  struct closure *prev=NULL;
-
-  /* First, make some basic checks that the CURLM handle is a good handle */
-  if(!GOOD_MULTI_HANDLE(multi))
-    return CURLM_BAD_HANDLE;
-
-  /* Verify that we got a somewhat good easy handle too */
-  if(!GOOD_EASY_HANDLE(easy_handle))
-    return CURLM_BAD_EASY_HANDLE;
-
-  /* Prevent users from adding the same handle more than once! */
-  if(((struct SessionHandle *)easy_handle)->multi)
-    /* possibly we should create a new unique error code for this condition */
-    return CURLM_BAD_EASY_HANDLE;
-
-  /* Now, time to add an easy handle to the multi stack */
-  easy = (struct Curl_one_easy *)calloc(sizeof(struct Curl_one_easy), 1);
-  if(!easy)
-    return CURLM_OUT_OF_MEMORY;
-
-  cl = multi->closure;
-  while(cl) {
-    struct closure *next = cl->next;
-    if(cl->easy_handle == (struct SessionHandle *)easy_handle) {
-      /* remove this handle from the closure list */
-      free(cl);
-      if(prev)
-        prev->next = next;
-      else
-        multi->closure = next;
-      break; /* no need to continue since this handle can only be present once
-                in the list */
-    }
-    cl = next;
-  }
-
-  /* set the easy handle */
-  easy->easy_handle = easy_handle;
-  multistate(easy, CURLM_STATE_INIT);
-
-  /* for multi interface connections, we share the DNS cache automatically if
-     the easy handle's cache is currently private. */
-  if (easy->easy_handle->dns.hostcache &&
-      (easy->easy_handle->dns.hostcachetype == HCACHE_PRIVATE)) {
-    Curl_hash_destroy(easy->easy_handle->dns.hostcache);
-    easy->easy_handle->dns.hostcache = NULL;
-    easy->easy_handle->dns.hostcachetype = HCACHE_NONE;
-  }
-
-  if (!easy->easy_handle->dns.hostcache ||
-      (easy->easy_handle->dns.hostcachetype == HCACHE_NONE)) {
-    easy->easy_handle->dns.hostcache = multi->hostcache;
-    easy->easy_handle->dns.hostcachetype = HCACHE_MULTI;
-  }
-
-  if(easy->easy_handle->state.connc) {
-    if(easy->easy_handle->state.connc->type == CONNCACHE_PRIVATE) {
-      /* kill old private version */
-      Curl_rm_connc(easy->easy_handle->state.connc);
-      /* point out our shared one instead */
-      easy->easy_handle->state.connc = multi->connc;
-    }
-    /* else it is already using multi? */
-  }
-  else
-    /* point out our shared one */
-    easy->easy_handle->state.connc = multi->connc;
-
-  /* Make sure the type is setup correctly */
-  easy->easy_handle->state.connc->type = CONNCACHE_MULTI;
-
-  /* We add this new entry first in the list. We make our 'next' point to the
-     previous next and our 'prev' point back to the 'first' struct */
-  easy->next = multi->easy.next;
-  easy->prev = &multi->easy;
-
-  /* make 'easy' the first node in the chain */
-  multi->easy.next = easy;
-
-  /* if there was a next node, make sure its 'prev' pointer links back to
-     the new node */
-  if(easy->next)
-    easy->next->prev = easy;
-
-  Curl_easy_addmulti(easy_handle, multi_handle);
-
-  /* make the SessionHandle struct refer back to this struct */
-  easy->easy_handle->set.one_easy = easy;
-
-  /* increase the node-counter */
-  multi->num_easy++;
-
-  if((multi->num_easy * 4) > multi->connc->num) {
-    /* We want the connection cache to have plenty of room. Before we
-       supported the shared cache, every single easy handle had 5 entries in
-       its cache by default. */
-    CURLcode res = Curl_ch_connc(easy_handle, multi->connc,
-                                 multi->connc->num*4);
-    if(res != CURLE_OK)
-      /* TODO: we need to do some cleaning up here! */
-      return CURLM_OUT_OF_MEMORY;
-  }
-
-  /* increase the alive-counter */
-  multi->num_alive++;
-
-  update_timer(multi);
-  return CURLM_OK;
-}
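/* [Editor's note: illustrative sketch, not part of the original file.] The
   list insertion above is O(1) because 'multi->easy' is a sentinel node
   embedded in the multi handle itself: new entries always go directly after
   it, so the head never needs special-casing. The same move with generic
   names: */
struct demo_link { struct demo_link *next, *prev; };

static void demo_insert_after_head(struct demo_link *head,
                                   struct demo_link *node)
{
  node->next = head->next;
  node->prev = head;
  if(node->next)
    node->next->prev = node;   /* re-link the old first element */
  head->next = node;
}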
-
-#if 0
-/* Debug-function, used like this:
- *
- * Curl_hash_print(multi->sockhash, debug_print_sock_hash);
- *
- * Enable the hash print function first by editing hash.c
- */
-static void debug_print_sock_hash(void *p)
-{
-  struct Curl_sh_entry *sh = (struct Curl_sh_entry *)p;
-
-  fprintf(stderr, " [easy %p/magic %x/socket %d]",
-          (void *)sh->easy, sh->easy->magic, sh->socket);
-}
-#endif
-
-CURLMcode curl_multi_remove_handle(CURLM *multi_handle,
-                                   CURL *curl_handle)
-{
-  struct Curl_multi *multi=(struct Curl_multi *)multi_handle;
-  struct Curl_one_easy *easy;
-
-  /* First, make some basic checks that the CURLM handle is a good handle */
-  if(!GOOD_MULTI_HANDLE(multi))
-    return CURLM_BAD_HANDLE;
-
-  /* Verify that we got a somewhat good easy handle too */
-  if(!GOOD_EASY_HANDLE(curl_handle))
-    return CURLM_BAD_EASY_HANDLE;
-
-  /* scan through the list and remove the 'curl_handle' */
-  easy = multi->easy.next;
-  while(easy) {
-    if(easy->easy_handle == (struct SessionHandle *)curl_handle)
-      break;
-    easy=easy->next;
-  }
-
-  if(easy) {
-    bool premature = (bool)(easy->state != CURLM_STATE_COMPLETED);
-
-    /* If the 'state' is not INIT or COMPLETED, we might need to do something
-       nice to put the easy_handle in a good known state when this returns. */
-    if(premature)
-      /* this handle is "alive" so we need to count down the total number of
-         alive connections when this is removed */
-      multi->num_alive--;
-
-    if (easy->easy_handle->state.is_in_pipeline &&
-        easy->state > CURLM_STATE_DO) {
-      /* If the handle is in a pipeline and has finished sending off its
-         request, we need to remember the fact that we want to remove this
-         handle but do the actual removal at a later time */
-      easy->easy_handle->state.cancelled = TRUE;
-      return CURLM_OK;
-    }
-
-    /* The timer must be shut down before easy->multi is set to NULL,
-       else the timenode will remain in the splay tree after
-       curl_easy_cleanup is called. */
-    Curl_expire(easy->easy_handle, 0);
-
-    if(easy->easy_handle->dns.hostcachetype == HCACHE_MULTI) {
-      /* clear out the usage of the shared DNS cache */
-      easy->easy_handle->dns.hostcache = NULL;
-      easy->easy_handle->dns.hostcachetype = HCACHE_NONE;
-    }
-
-    /* if we have a connection we must call Curl_done() here so that we
-       don't leave a half-baked one around */
-    if(easy->easy_conn) {
-      /* Set up the association right */
-      easy->easy_conn->data = easy->easy_handle;
-
-      /* Curl_done() clears the conn->data field to lose the association
-         between the easy handle and the connection */
-      Curl_done(&easy->easy_conn, easy->result, premature);
-
-      if(easy->easy_conn)
-        /* the connection is still alive, set back the association to enable
-           the check below to trigger TRUE */
-        easy->easy_conn->data = easy->easy_handle;
-    }
-
-    /* If this easy_handle was the last one in charge of one or more
-       connections in the shared connection cache, we might need to keep this
-       handle around until either A) the connection is closed and killed
-       properly, or B) another easy_handle uses the connection.
-
-       The reason we need an easy_handle associated with a live connection is
-       simply that some connections need a handle to get closed down
-       properly. Currently, the only connections that need to keep an
-       easy_handle around are those using FTP(S). Such connections have the
-       PROT_CLOSEACTION bit set.
-
-       Thus, we need to check all connections in the shared cache that point
-       to this handle and use PROT_CLOSEACTION. If there are any, we must add
-       this handle to the list of "easy handles kept around for nice
-       connection closures".
-     */
-    if(multi_conn_using(multi, easy->easy_handle)) {
-      /* There's at least one connection using this handle so we must keep
-         this handle around. We also keep the connection cache pointer
-         pointing to the shared one since that will be used on close as
-         well. */
-      easy->easy_handle->state.shared_conn = multi;
-
-      /* this handle is still being used by a shared connection cache and
-         thus we leave it around for now */
-      add_closure(multi, easy->easy_handle);
-    }
-
-    if(easy->easy_handle->state.connc->type == CONNCACHE_MULTI) {
-      /* if this was using the shared connection cache we clear the pointer
-         to that since we're not part of that handle anymore */
-      easy->easy_handle->state.connc = NULL;
-
-      /* and modify the connectindex since this handle can't point to the
-         connection cache anymore */
-      if(easy->easy_conn)
-        easy->easy_conn->connectindex = -1;
-    }
-
-    /* change state without using multistate(), only to make singlesocket() do
-       what we want */
-    easy->state = CURLM_STATE_COMPLETED;
-    singlesocket(multi, easy); /* to let the application know which sockets
-                                  vanish with this handle */
-
-    Curl_easy_addmulti(easy->easy_handle, NULL); /* clear the association
-                                                    to this multi handle */
-
-    /* make the previous node point to our next */
-    if(easy->prev)
-      easy->prev->next = easy->next;
-    /* make our next point to our previous node */
-    if(easy->next)
-      easy->next->prev = easy->prev;
-
-    easy->easy_handle->set.one_easy = NULL; /* detached */
-
-    /* NOTE NOTE NOTE
-       We do not touch the easy handle here! */
-    if (easy->msg)
-      free(easy->msg);
-    free(easy);
-
-    multi->num_easy--; /* one less to care about now */
-
-    update_timer(multi);
-    return CURLM_OK;
-  }
-  else
-    return CURLM_BAD_EASY_HANDLE; /* it wasn't found */
-}
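-
-/* Usage sketch (public libcurl API; the URL is a placeholder): adding and
-   then removing an easy handle. As the NOTE above stresses, removal does
-   not free the easy handle - the application still owns it and must call
-   curl_easy_cleanup() itself. */
-#include <curl/curl.h>
-
-static void add_then_remove(CURLM *multi)
-{
-  CURL *easy = curl_easy_init();
-  if(!easy)
-    return;
-
-  curl_easy_setopt(easy, CURLOPT_URL, "http://example.com/");
-
-  curl_multi_add_handle(multi, easy);    /* multi now tracks the handle */
-  /* ... drive the transfer with curl_multi_perform() ... */
-  curl_multi_remove_handle(multi, easy); /* multi forgets it again */
-
-  curl_easy_cleanup(easy);               /* still the caller's job */
-}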
-
-bool Curl_multi_canPipeline(struct Curl_multi* multi)
-{
-  return multi->pipelining_enabled;
-}
-
-static int waitconnect_getsock(struct connectdata *conn,
-                               curl_socket_t *sock,
-                               int numsocks)
-{
-  if(!numsocks)
-    return GETSOCK_BLANK;
-
-  sock[0] = conn->sock[FIRSTSOCKET];
-  return GETSOCK_WRITESOCK(0);
-}
-
-static int domore_getsock(struct connectdata *conn,
-                          curl_socket_t *sock,
-                          int numsocks)
-{
-  if(!numsocks)
-    return GETSOCK_BLANK;
-
-  /* When in the DO_MORE state, we could either be waiting for us to connect
-     to a remote site, or waiting for that site to connect to us. The
-     difference is in how we wait: if we connect to the site we wait for the
-     socket to become writable, if the site connects to us we wait for it to
-     become readable */
-  sock[0] = conn->sock[SECONDARYSOCKET];
-
-  return GETSOCK_WRITESOCK(0);
-}
-
-/* returns bitmapped flags for this handle and its sockets */
-static int multi_getsock(struct Curl_one_easy *easy,
-                         curl_socket_t *socks, /* points to numsocks number
-                                                 of sockets */
-                         int numsocks)
-{
-  if (easy->easy_handle->state.pipe_broke) {
-    return 0;
-  }
-
-  if (easy->state > CURLM_STATE_CONNECT &&
-      easy->state < CURLM_STATE_COMPLETED) {
-    /* Set up ownership correctly */
-    easy->easy_conn->data = easy->easy_handle;
-  }
-
-  switch(easy->state) {
-  case CURLM_STATE_TOOFAST:  /* returns 0, so will not select. */
-  default:
-    /* this will get called with CURLM_STATE_COMPLETED when a handle is
-       removed */
-    return 0;
-
-  case CURLM_STATE_WAITRESOLVE:
-    return Curl_resolv_getsock(easy->easy_conn, socks, numsocks);
-
-  case CURLM_STATE_PROTOCONNECT:
-    return Curl_protocol_getsock(easy->easy_conn, socks, numsocks);
-
-  case CURLM_STATE_DOING:
-    return Curl_doing_getsock(easy->easy_conn, socks, numsocks);
-
-  case CURLM_STATE_WAITCONNECT:
-    return waitconnect_getsock(easy->easy_conn, socks, numsocks);
-
-  case CURLM_STATE_DO_MORE:
-    return domore_getsock(easy->easy_conn, socks, numsocks);
-
-  case CURLM_STATE_PERFORM:
-  case CURLM_STATE_WAITPERFORM:
-    return Curl_single_getsock(easy->easy_conn, socks, numsocks);
-  }
-
-}
-
-CURLMcode curl_multi_fdset(CURLM *multi_handle,
-                           fd_set *read_fd_set, fd_set *write_fd_set,
-                           fd_set *exc_fd_set, int *max_fd)
-{
-  /* Scan through all the easy handles to get the file descriptors set.
-     Some easy handles may not have connected to the remote host yet,
-     and then we must make sure that is done. */
-  struct Curl_multi *multi=(struct Curl_multi *)multi_handle;
-  struct Curl_one_easy *easy;
-  int this_max_fd=-1;
-  curl_socket_t sockbunch[MAX_SOCKSPEREASYHANDLE];
-  int bitmap;
-  int i;
-  (void)exc_fd_set; /* not used */
-
-  if(!GOOD_MULTI_HANDLE(multi))
-    return CURLM_BAD_HANDLE;
-
-  easy=multi->easy.next;
-  while(easy) {
-    bitmap = multi_getsock(easy, sockbunch, MAX_SOCKSPEREASYHANDLE);
-
-    for(i=0; i< MAX_SOCKSPEREASYHANDLE; i++) {
-      curl_socket_t s = CURL_SOCKET_BAD;
-
-      if(bitmap & GETSOCK_READSOCK(i)) {
-        FD_SET(sockbunch[i], read_fd_set);
-        s = sockbunch[i];
-      }
-      if(bitmap & GETSOCK_WRITESOCK(i)) {
-        FD_SET(sockbunch[i], write_fd_set);
-        s = sockbunch[i];
-      }
-      if(s == CURL_SOCKET_BAD)
-        /* this socket is unused, break out of loop */
-        break;
-      else {
-        if((int)s > this_max_fd)
-          this_max_fd = (int)s;
-      }
-    }
-
-    easy = easy->next; /* check next handle */
-  }
-
-  *max_fd = this_max_fd;
-
-  return CURLM_OK;
-}
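-
-/* Usage sketch (public libcurl API): the classic select() loop built on
-   curl_multi_fdset(). Note that max_fd can legitimately come back as -1
-   when libcurl holds no sockets yet, in which case the application should
-   just wait a short while and retry. */
-#include <sys/select.h>
-#include <curl/curl.h>
-
-static void wait_for_activity(CURLM *multi)
-{
-  fd_set r, w, e;
-  int maxfd = -1;
-  long ms = -1;
-  struct timeval tv = { 1, 0 };  /* one second fallback */
-
-  FD_ZERO(&r);
-  FD_ZERO(&w);
-  FD_ZERO(&e);
-
-  curl_multi_fdset(multi, &r, &w, &e, &maxfd);
-
-  /* let curl_multi_timeout() (further below) cap how long we sleep */
-  curl_multi_timeout(multi, &ms);
-  if(ms >= 0) {
-    tv.tv_sec = ms / 1000;
-    tv.tv_usec = (ms % 1000) * 1000;
-  }
-
-  if(maxfd >= 0)
-    select(maxfd + 1, &r, &w, &e, &tv);
-}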
-
-static CURLMcode multi_runsingle(struct Curl_multi *multi,
-                                 struct Curl_one_easy *easy)
-{
-  struct Curl_message *msg = NULL;
-  bool connected;
-  bool async;
-  bool protocol_connect;
-  bool dophase_done;
-  bool done;
-  CURLMcode result = CURLM_OK;
-  struct Curl_transfer_keeper *k;
-
-  do {
-
-    if(!GOOD_EASY_HANDLE(easy->easy_handle))
-      return CURLM_BAD_EASY_HANDLE;
-
-    if (easy->easy_handle->state.pipe_broke) {
-      infof(easy->easy_handle, "Pipe broke: handle %p, url = %s\n",
-            (void *)easy, easy->easy_handle->reqdata.path);
-      if(easy->easy_handle->state.is_in_pipeline) {
-        /* Head back to the CONNECT state */
-        multistate(easy, CURLM_STATE_CONNECT);
-        result = CURLM_CALL_MULTI_PERFORM;
-        easy->result = CURLE_OK;
-      } else {
-        easy->result = CURLE_COULDNT_CONNECT;
-        multistate(easy, CURLM_STATE_COMPLETED);
-      }
-
-      easy->easy_handle->state.pipe_broke = FALSE;
-      easy->easy_conn = NULL;
-      break;
-    }
-
-    if (easy->state > CURLM_STATE_CONNECT &&
-        easy->state < CURLM_STATE_COMPLETED) {
-      /* Make sure we set the connection's current owner */
-      easy->easy_conn->data = easy->easy_handle;
-    }
-
-    if (CURLM_STATE_WAITCONNECT <= easy->state &&
-        easy->state <= CURLM_STATE_DO &&
-        easy->easy_handle->change.url_changed) {
-      char *gotourl;
-      Curl_posttransfer(easy->easy_handle);
-
-      easy->result = Curl_done(&easy->easy_conn, CURLE_OK, FALSE);
-      /* We make sure that the pipe broken flag is reset
-         because in this case, it isn't an actual break */
-      easy->easy_handle->state.pipe_broke = FALSE;
-      if(CURLE_OK == easy->result) {
-        gotourl = strdup(easy->easy_handle->change.url);
-        if(gotourl) {
-          easy->easy_handle->change.url_changed = FALSE;
-          easy->result = Curl_follow(easy->easy_handle, gotourl, FALSE);
-          if(CURLE_OK == easy->result)
-            multistate(easy, CURLM_STATE_CONNECT);
-          else
-            free(gotourl);
-        }
-        else {
-          easy->result = CURLE_OUT_OF_MEMORY;
-          multistate(easy, CURLM_STATE_COMPLETED);
-          break;
-        }
-      }
-    }
-
-    easy->easy_handle->change.url_changed = FALSE;
-
-    switch(easy->state) {
-    case CURLM_STATE_INIT:
-      /* init this transfer. */
-      easy->result=Curl_pretransfer(easy->easy_handle);
-
-      if(CURLE_OK == easy->result) {
-        /* after init, go CONNECT */
-        multistate(easy, CURLM_STATE_CONNECT);
-        result = CURLM_CALL_MULTI_PERFORM;
-
-        easy->easy_handle->state.used_interface = Curl_if_multi;
-      }
-      break;
-
-    case CURLM_STATE_CONNECT:
-      /* Connect. We get a connection identifier filled in. */
-      Curl_pgrsTime(easy->easy_handle, TIMER_STARTSINGLE);
-      easy->result = Curl_connect(easy->easy_handle, &easy->easy_conn,
-                                  &async, &protocol_connect);
-
-      if(CURLE_OK == easy->result) {
-        /* Add this handle to the send pipeline */
-        Curl_addHandleToPipeline(easy->easy_handle,
-                                 easy->easy_conn->send_pipe);
-
-        if(async)
-          /* We're now waiting for an asynchronous name lookup */
-          multistate(easy, CURLM_STATE_WAITRESOLVE);
-        else {
-          /* after the connect has been sent off, go WAITCONNECT unless the
-             protocol connect is already done and we can go directly to
-             WAITDO! */
-          result = CURLM_CALL_MULTI_PERFORM;
-
-          if(protocol_connect) {
-            multistate(easy, CURLM_STATE_WAITDO);
-          } else {
-            multistate(easy, CURLM_STATE_WAITCONNECT);
-          }
-        }
-      }
-      break;
-
-    case CURLM_STATE_WAITRESOLVE:
-      /* awaiting an asynchronous name resolve to complete */
-    {
-      struct Curl_dns_entry *dns = NULL;
-
-      /* check if we have the name resolved by now */
-      easy->result = Curl_is_resolved(easy->easy_conn, &dns);
-
-      if(dns) {
-        /* Perform the next step in the connection phase, and then move on
-           to the WAITCONNECT state */
-        easy->result = Curl_async_resolved(easy->easy_conn,
-                                           &protocol_connect);
-
-        if(CURLE_OK != easy->result)
-          /* if Curl_async_resolved() returns failure, the connection struct
-             is already freed and gone */
-          easy->easy_conn = NULL;           /* no more connection */
-        else {
-          /* call again please so that we get the next socket setup */
-          result = CURLM_CALL_MULTI_PERFORM;
-          if(protocol_connect)
-            multistate(easy, CURLM_STATE_DO);
-          else
-            multistate(easy, CURLM_STATE_WAITCONNECT);
-        }
-      }
-
-      if(CURLE_OK != easy->result) {
-        /* failure detected */
-        Curl_disconnect(easy->easy_conn); /* disconnect properly */
-        easy->easy_conn = NULL;           /* no more connection */
-        break;
-      }
-    }
-    break;
-
-    case CURLM_STATE_WAITCONNECT:
-      /* awaiting completion of an asynchronous connect */
-      easy->result = Curl_is_connected(easy->easy_conn,
-                                       FIRSTSOCKET,
-                                       &connected);
-      if(connected)
-        easy->result = Curl_protocol_connect(easy->easy_conn,
-                                             &protocol_connect);
-
-      if(CURLE_OK != easy->result) {
-        /* failure detected */
-        Curl_disconnect(easy->easy_conn); /* close the connection */
-        easy->easy_conn = NULL;           /* no more connection */
-        break;
-      }
-
-      if(connected) {
-        if(!protocol_connect) {
-          /* We have a TCP connection, but 'protocol_connect' is false here,
-             so we continue to STATE_PROTOCONNECT. If the protocol connect
-             had already been done, we would move on to WAITDO instead. */
-          multistate(easy, CURLM_STATE_PROTOCONNECT);
-        }
-        else {
-          /* after the connect has completed, go WAITDO */
-          multistate(easy, CURLM_STATE_WAITDO);
-
-          result = CURLM_CALL_MULTI_PERFORM;
-        }
-      }
-      break;
-
-    case CURLM_STATE_PROTOCONNECT:
-      /* protocol-specific connect phase */
-      easy->result = Curl_protocol_connecting(easy->easy_conn,
-                                              &protocol_connect);
-      if(protocol_connect) {
-        /* after the connect has completed, go WAITDO */
-        multistate(easy, CURLM_STATE_WAITDO);
-        result = CURLM_CALL_MULTI_PERFORM;
-      }
-      else if(easy->result) {
-        /* failure detected */
-        Curl_posttransfer(easy->easy_handle);
-        Curl_done(&easy->easy_conn, easy->result, FALSE);
-        Curl_disconnect(easy->easy_conn); /* close the connection */
-        easy->easy_conn = NULL;           /* no more connection */
-      }
-      break;
-
-    case CURLM_STATE_WAITDO:
-      /* Wait for our turn to DO when we're pipelining requests */
-#ifdef CURLDEBUG
-      infof(easy->easy_handle, "Conn %d send pipe %d inuse %d athead %d\n",
-            easy->easy_conn->connectindex,
-            easy->easy_conn->send_pipe->size,
-            easy->easy_conn->writechannel_inuse,
-            Curl_isHandleAtHead(easy->easy_handle,
-                                easy->easy_conn->send_pipe));
-#endif
-      if (!easy->easy_conn->writechannel_inuse &&
-          Curl_isHandleAtHead(easy->easy_handle,
-                              easy->easy_conn->send_pipe)) {
-        /* Grab the channel */
-        easy->easy_conn->writechannel_inuse = TRUE;
-        multistate(easy, CURLM_STATE_DO);
-        result = CURLM_CALL_MULTI_PERFORM;
-      }
-      break;
-
-    case CURLM_STATE_DO:
-      if(easy->easy_handle->set.connect_only) {
-        /* keep connection open for application to use the socket */
-        easy->easy_conn->bits.close = FALSE;
-        multistate(easy, CURLM_STATE_DONE);
-        easy->result = CURLE_OK;
-        result = CURLM_OK;
-      }
-      else {
-        /* Perform the protocol's DO action */
-        easy->result = Curl_do(&easy->easy_conn,
-                               &dophase_done);
-
-        if(CURLE_OK == easy->result) {
-
-          if(!dophase_done) {
-            /* DO was not completed in one function call, we must continue
-               DOING... */
-            multistate(easy, CURLM_STATE_DOING);
-            result = CURLM_OK;
-          }
-
-          /* after DO, go DO_DONE... or DO_MORE */
-          else if(easy->easy_conn->bits.do_more) {
-            /* we're supposed to do more, but we need to sit down, relax
-               and wait a little while first */
-            multistate(easy, CURLM_STATE_DO_MORE);
-            result = CURLM_OK;
-          }
-          else {
-            /* we're done with the DO, now DO_DONE */
-            easy->result = Curl_readwrite_init(easy->easy_conn);
-            if(CURLE_OK == easy->result) {
-              multistate(easy, CURLM_STATE_DO_DONE);
-              result = CURLM_CALL_MULTI_PERFORM;
-            }
-          }
-        }
-        else {
-          /* failure detected */
-          Curl_posttransfer(easy->easy_handle);
-          Curl_done(&easy->easy_conn, easy->result, FALSE);
-          Curl_disconnect(easy->easy_conn); /* close the connection */
-          easy->easy_conn = NULL;           /* no more connection */
-        }
-      }
-      break;
-
-    case CURLM_STATE_DOING:
-      /* we continue DOING until the DO phase is complete */
-      easy->result = Curl_protocol_doing(easy->easy_conn,
-                                         &dophase_done);
-      if(CURLE_OK == easy->result) {
-        if(dophase_done) {
-          /* after DO, go PERFORM... or DO_MORE */
-          if(easy->easy_conn->bits.do_more) {
-            /* we're supposed to do more, but we need to sit down, relax
-               and wait a little while first */
-            multistate(easy, CURLM_STATE_DO_MORE);
-            result = CURLM_OK;
-          }
-          else {
-            /* we're done with the DO, now DO_DONE */
-            easy->result = Curl_readwrite_init(easy->easy_conn);
-            if(CURLE_OK == easy->result) {
-              multistate(easy, CURLM_STATE_DO_DONE);
-              result = CURLM_CALL_MULTI_PERFORM;
-            }
-          }
-        } /* dophase_done */
-      }
-      else {
-        /* failure detected */
-        Curl_posttransfer(easy->easy_handle);
-        Curl_done(&easy->easy_conn, easy->result, FALSE);
-        Curl_disconnect(easy->easy_conn); /* close the connection */
-        easy->easy_conn = NULL;           /* no more connection */
-      }
-      break;
-
-    case CURLM_STATE_DO_MORE:
-      /* Ready to do more? */
-      easy->result = Curl_is_connected(easy->easy_conn,
-                                       SECONDARYSOCKET,
-                                       &connected);
-      if(connected) {
-        /*
-         * When we are connected, DO MORE and then go DO_DONE
-         */
-        easy->result = Curl_do_more(easy->easy_conn);
-
-        if(CURLE_OK == easy->result)
-          easy->result = Curl_readwrite_init(easy->easy_conn);
-        else
-          /* Remove ourselves from the send pipeline */
-          Curl_removeHandleFromPipeline(easy->easy_handle,
-                                        easy->easy_conn->send_pipe);
-
-        if(CURLE_OK == easy->result) {
-          multistate(easy, CURLM_STATE_DO_DONE);
-          result = CURLM_CALL_MULTI_PERFORM;
-        }
-      }
-      break;
-
-    case CURLM_STATE_DO_DONE:
-      /* Remove ourselves from the send pipeline */
-      Curl_removeHandleFromPipeline(easy->easy_handle,
-                                    easy->easy_conn->send_pipe);
-      /* Add ourselves to the recv pipeline */
-      Curl_addHandleToPipeline(easy->easy_handle,
-                               easy->easy_conn->recv_pipe);
-      multistate(easy, CURLM_STATE_WAITPERFORM);
-      result = CURLM_CALL_MULTI_PERFORM;
-      break;
-
-    case CURLM_STATE_WAITPERFORM:
-#ifdef CURLDEBUG
-      infof(easy->easy_handle, "Conn %d recv pipe %d inuse %d athead %d\n",
-            easy->easy_conn->connectindex,
-            easy->easy_conn->recv_pipe->size,
-            easy->easy_conn->readchannel_inuse,
-            Curl_isHandleAtHead(easy->easy_handle,
-                                easy->easy_conn->recv_pipe));
-#endif
-      /* Wait for our turn to PERFORM */
-      if (!easy->easy_conn->readchannel_inuse &&
-          Curl_isHandleAtHead(easy->easy_handle,
-                              easy->easy_conn->recv_pipe)) {
-        /* Grab the channel */
-        easy->easy_conn->readchannel_inuse = TRUE;
-        multistate(easy, CURLM_STATE_PERFORM);
-        result = CURLM_CALL_MULTI_PERFORM;
-      }
-      break;
-
-    case CURLM_STATE_TOOFAST: /* limit-rate exceeded in either direction */
-      /* if both rates are within spec, resume transfer */
-      Curl_pgrsUpdate(easy->easy_conn);
-      if ( ( ( easy->easy_handle->set.max_send_speed == 0 ) ||
-             ( easy->easy_handle->progress.ulspeed <
-               easy->easy_handle->set.max_send_speed ) )  &&
-           ( ( easy->easy_handle->set.max_recv_speed == 0 ) ||
-             ( easy->easy_handle->progress.dlspeed <
-               easy->easy_handle->set.max_recv_speed ) )
-        )
-      multistate(easy, CURLM_STATE_PERFORM);
-      break;
-
-    case CURLM_STATE_PERFORM:
-      /* check if over speed */
-      if ( (  ( easy->easy_handle->set.max_send_speed > 0 ) &&
-              ( easy->easy_handle->progress.ulspeed >
-                easy->easy_handle->set.max_send_speed ) )  ||
-           (  ( easy->easy_handle->set.max_recv_speed > 0 ) &&
-              ( easy->easy_handle->progress.dlspeed >
-                easy->easy_handle->set.max_recv_speed ) )
-        ) {
-        /* Transfer is over the speed limit. Change state.  TODO: Call
-         * Curl_expire() with the time left until we're targeted to be below
-         * the speed limit again. */
-        multistate(easy, CURLM_STATE_TOOFAST );
-        break;
-      }
-
-      /* read/write data if it is ready to do so */
-      easy->result = Curl_readwrite(easy->easy_conn, &done);
-
-      k = &easy->easy_handle->reqdata.keep;
-
-      if (!(k->keepon & KEEP_READ)) {
-          /* We're done reading */
-          easy->easy_conn->readchannel_inuse = FALSE;
-      }
-
-      if (!(k->keepon & KEEP_WRITE)) {
-          /* We're done writing */
-          easy->easy_conn->writechannel_inuse = FALSE;
-      }
-
-      if(easy->result)  {
-        /* The transfer phase returned an error, so we mark the connection to
-         * get closed to prevent it from being re-used. This is because we
-         * can't possibly know whether the connection is in good shape now. */
-        easy->easy_conn->bits.close = TRUE;
-
-        if(CURL_SOCKET_BAD != easy->easy_conn->sock[SECONDARYSOCKET]) {
-          /* if we failed anywhere, we must clean up the secondary socket if
-             it was used */
-          sclose(easy->easy_conn->sock[SECONDARYSOCKET]);
-          easy->easy_conn->sock[SECONDARYSOCKET] = CURL_SOCKET_BAD;
-        }
-        Curl_posttransfer(easy->easy_handle);
-        Curl_done(&easy->easy_conn, easy->result, FALSE);
-      }
-      else if(TRUE == done) {
-        char *newurl;
-        bool retry = Curl_retry_request(easy->easy_conn, &newurl);
-
-        /* call this even if the readwrite function returned error */
-        Curl_posttransfer(easy->easy_handle);
-
-        /* When we follow redirects, we must go back to the CONNECT state */
-        if(easy->easy_handle->reqdata.newurl || retry) {
-          Curl_removeHandleFromPipeline(easy->easy_handle,
-                                        easy->easy_conn->recv_pipe);
-          if(!retry) {
-            /* if the URL is a follow-location and not just a retried request
-               then figure out the URL here */
-            newurl = easy->easy_handle->reqdata.newurl;
-            easy->easy_handle->reqdata.newurl = NULL;
-          }
-          easy->result = Curl_done(&easy->easy_conn, CURLE_OK, FALSE);
-          if(easy->result == CURLE_OK)
-            easy->result = Curl_follow(easy->easy_handle, newurl, retry);
-          if(CURLE_OK == easy->result) {
-            multistate(easy, CURLM_STATE_CONNECT);
-            result = CURLM_CALL_MULTI_PERFORM;
-          }
-          else
-            /* Since we "took it", we are in charge of freeing this on
-               failure */
-            free(newurl);
-        }
-        else {
-          /* after the transfer is done, go DONE */
-          multistate(easy, CURLM_STATE_DONE);
-          result = CURLM_CALL_MULTI_PERFORM;
-        }
-      }
-
-      break;
-
-    case CURLM_STATE_DONE:
-      /* Remove ourselves from the receive pipeline */
-      Curl_removeHandleFromPipeline(easy->easy_handle,
-                                    easy->easy_conn->recv_pipe);
-      easy->easy_handle->state.is_in_pipeline = FALSE;
-
-      if (easy->easy_conn->bits.stream_was_rewound) {
-          /* This request read past its response boundary so we quickly
-             let the other requests consume those bytes since there is no
-             guarantee that the socket will become active again */
-          result = CURLM_CALL_MULTI_PERFORM;
-      }
-
-      if (!easy->easy_handle->state.cancelled) {
-        /* post-transfer command */
-        easy->result = Curl_done(&easy->easy_conn, CURLE_OK, FALSE);
-
-        /* after we have DONE what we're supposed to do, go COMPLETED, and
-           it doesn't matter what the Curl_done() returned! */
-        multistate(easy, CURLM_STATE_COMPLETED);
-      }
-
-      break;
-
-    case CURLM_STATE_COMPLETED:
-      if (easy->easy_handle->state.cancelled)
-        /* Go into the CANCELLED state if we were cancelled */
-        multistate(easy, CURLM_STATE_CANCELLED);
-
-      /* this is a completed transfer, it is likely to still be connected */
-
-      /* This node should be delinked from the list now and we should post
-         an information message that we are complete. */
-      break;
-
-    case CURLM_STATE_CANCELLED:
-      /* Cancelled transfer, wait to be cleaned up */
-      break;
-
-    default:
-      return CURLM_INTERNAL_ERROR;
-    }
-
-    if(CURLM_STATE_COMPLETED != easy->state) {
-      if(CURLE_OK != easy->result) {
-        /*
-         * If an error was returned, and we aren't in completed state now,
-         * then we go to completed and consider this transfer aborted.
-         */
-        easy->easy_handle->state.is_in_pipeline = FALSE;
-        easy->easy_handle->state.pipe_broke = FALSE;
-
-        if(easy->easy_conn) {
-          /* if this has a connection, unsubscribe from the pipelines */
-          easy->easy_conn->writechannel_inuse = FALSE;
-          easy->easy_conn->readchannel_inuse = FALSE;
-        }
-        multistate(easy, CURLM_STATE_COMPLETED);
-      }
-    }
-
-  } while (easy->easy_handle->change.url_changed);
-
-  if ((CURLM_STATE_COMPLETED == easy->state) && !easy->msg) {
-    if(easy->easy_handle->dns.hostcachetype == HCACHE_MULTI) {
-      /* clear out the usage of the shared DNS cache */
-      easy->easy_handle->dns.hostcache = NULL;
-      easy->easy_handle->dns.hostcachetype = HCACHE_NONE;
-    }
-
-    /* now add a node to the Curl_message linked list with this info */
-    msg = (struct Curl_message *)malloc(sizeof(struct Curl_message));
-
-    if(!msg)
-      return CURLM_OUT_OF_MEMORY;
-
-    msg->extmsg.msg = CURLMSG_DONE;
-    msg->extmsg.easy_handle = easy->easy_handle;
-    msg->extmsg.data.result = easy->result;
-    msg->next = NULL;
-
-    easy->msg = msg;
-    easy->msg_num = 1; /* there is one unread message here */
-
-    multi->num_msgs++; /* increase message counter */
-  }
-
-  return result;
-}
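-
-/* Standalone sketch (hypothetical states, not libcurl code) of the driving
-   pattern multi_runsingle() uses above: each state either completes its
-   step and requests an immediate re-run, or stays put until external I/O
-   readiness wakes it up again. */
-enum tstate { ST_INIT, ST_WAIT_IO, ST_DONE };
-
-static int run_one(enum tstate *state, int io_ready)
-{
-  int run_again = 0;
-
-  switch(*state) {
-  case ST_INIT:
-    /* setup finished in one go: advance and ask for an immediate re-run */
-    *state = ST_WAIT_IO;
-    run_again = 1;
-    break;
-  case ST_WAIT_IO:
-    if(io_ready) {
-      *state = ST_DONE;
-      run_again = 1;
-    }
-    /* else: keep the state and retry when the socket is ready */
-    break;
-  case ST_DONE:
-    break;
-  }
-  return run_again;
-}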
-
-
-CURLMcode curl_multi_perform(CURLM *multi_handle, int *running_handles)
-{
-  struct Curl_multi *multi=(struct Curl_multi *)multi_handle;
-  struct Curl_one_easy *easy;
-  CURLMcode returncode=CURLM_OK;
-  struct Curl_tree *t;
-
-  if(!GOOD_MULTI_HANDLE(multi))
-    return CURLM_BAD_HANDLE;
-
-  easy=multi->easy.next;
-  while(easy) {
-    CURLMcode result;
-
-    if (easy->easy_handle->state.cancelled &&
-        easy->state == CURLM_STATE_CANCELLED) {
-      /* Remove cancelled handles once it's safe to do so */
-      Curl_multi_rmeasy(multi_handle, easy->easy_handle);
-      easy->easy_handle = NULL;
-      easy = easy->next;
-      continue;
-    }
-
-    result = multi_runsingle(multi, easy);
-    if(result)
-      returncode = result;
-
-    easy = easy->next; /* operate on next handle */
-  }
-
-  /*
-   * Simply remove all expired timers from the splay since handles are dealt
-   * with unconditionally by this function and curl_multi_timeout() requires
-   * that already passed/handled expire times are removed from the splay.
-   */
-  do {
-    struct timeval now = Curl_tvnow();
-    int key = now.tv_sec; /* drop the usec part */
-
-    multi->timetree = Curl_splaygetbest(key, multi->timetree, &t);
-    if (t) {
-      struct SessionHandle *d = t->payload;
-      struct timeval* tv = &d->state.expiretime;
-
-      /* clear the expire times within the handles that we remove from the
-         splay tree */
-      tv->tv_sec = 0;
-      tv->tv_usec = 0;
-    }
-
-  } while(t);
-
-  *running_handles = multi->num_alive;
-
-  if ( CURLM_OK == returncode )
-    update_timer(multi);
-  return returncode;
-}
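-
-/* Usage sketch (public libcurl API): the complete driving loop.
-   CURLM_CALL_MULTI_PERFORM asks for an immediate re-call, and
-   *running_handles dropping to zero means every added transfer has reached
-   COMPLETED. wait_for_activity() is the select() helper sketched earlier. */
-static void drive(CURLM *multi)
-{
-  int running = 1;
-
-  while(running) {
-    CURLMcode mc;
-
-    do {
-      mc = curl_multi_perform(multi, &running);
-    } while(mc == CURLM_CALL_MULTI_PERFORM);
-
-    if(running)
-      wait_for_activity(multi);  /* block until sockets are ready */
-  }
-}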
-
-/* This is called when an easy handle that is part of a multi handle gets
-   cleaned up */
-void Curl_multi_rmeasy(void *multi_handle, CURL *easy_handle)
-{
-  curl_multi_remove_handle(multi_handle, easy_handle);
-}
-
-
-CURLMcode curl_multi_cleanup(CURLM *multi_handle)
-{
-  struct Curl_multi *multi=(struct Curl_multi *)multi_handle;
-  struct Curl_one_easy *easy;
-  struct Curl_one_easy *nexteasy;
-  int i;
-  struct closure *cl;
-  struct closure *n;
-
-  if(GOOD_MULTI_HANDLE(multi)) {
-    multi->type = 0; /* not good anymore */
-    Curl_hash_destroy(multi->hostcache);
-    Curl_hash_destroy(multi->sockhash);
-
-    /* go over all connections that have close actions */
-    for(i=0; i< multi->connc->num; i++) {
-      if(multi->connc->connects[i] &&
-         multi->connc->connects[i]->protocol & PROT_CLOSEACTION) {
-        Curl_disconnect(multi->connc->connects[i]);
-        multi->connc->connects[i] = NULL;
-      }
-    }
-    /* now walk through the list of handles we kept around only to be
-       able to close connections "properly" */
-    cl = multi->closure;
-    while(cl) {
-      cl->easy_handle->state.shared_conn = NULL; /* no more shared */
-      if(cl->easy_handle->state.closed)
-        /* close handle only if curl_easy_cleanup() already has been called
-           for this easy handle */
-        Curl_close(cl->easy_handle);
-      n = cl->next;
-      free(cl);
-      cl= n;
-    }
-
-    Curl_rm_connc(multi->connc);
-
-    /* remove all easy handles */
-    easy = multi->easy.next;
-    while(easy) {
-      nexteasy=easy->next;
-      if(easy->easy_handle->dns.hostcachetype == HCACHE_MULTI) {
-        /* clear out the usage of the shared DNS cache */
-        easy->easy_handle->dns.hostcache = NULL;
-        easy->easy_handle->dns.hostcachetype = HCACHE_NONE;
-      }
-
-      /* Clear the pointer to the connection cache */
-      easy->easy_handle->state.connc = NULL;
-
-      Curl_easy_addmulti(easy->easy_handle, NULL); /* clear the association */
-
-      if (easy->msg)
-        free(easy->msg);
-      free(easy);
-      easy = nexteasy;
-    }
-
-    free(multi);
-
-    return CURLM_OK;
-  }
-  else
-    return CURLM_BAD_HANDLE;
-}
-
-CURLMsg *curl_multi_info_read(CURLM *multi_handle, int *msgs_in_queue)
-{
-  struct Curl_multi *multi=(struct Curl_multi *)multi_handle;
-
-  *msgs_in_queue = 0; /* default to none */
-
-  if(GOOD_MULTI_HANDLE(multi)) {
-    struct Curl_one_easy *easy;
-
-    if(!multi->num_msgs)
-      return NULL; /* no messages left to return */
-
-    easy=multi->easy.next;
-    while(easy) {
-      if(easy->msg_num) {
-        easy->msg_num--;
-        break;
-      }
-      easy = easy->next;
-    }
-    if(!easy)
-      return NULL; /* this means the internal message count is confused */
-
-    multi->num_msgs--;
-    *msgs_in_queue = multi->num_msgs;
-
-    return &easy->msg->extmsg;
-  }
-  else
-    return NULL;
-}
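-
-/* Usage sketch (public libcurl API): draining the message queue after a
-   perform round. CURLMSG_DONE is currently the only message type that is
-   ever delivered. */
-#include <stdio.h>
-#include <curl/curl.h>
-
-static void drain_messages(CURLM *multi)
-{
-  CURLMsg *msg;
-  int left;
-
-  while((msg = curl_multi_info_read(multi, &left)) != NULL) {
-    if(msg->msg == CURLMSG_DONE)
-      fprintf(stderr, "handle %p done, result %d (%d more queued)\n",
-              (void *)msg->easy_handle, (int)msg->data.result, left);
-  }
-}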
-
-/*
- * singlesocket() checks what sockets we deal with and their "action state"
- * and if we have a different state in any of those sockets from last time we
- * call the callback accordingly.
- */
-static void singlesocket(struct Curl_multi *multi,
-                         struct Curl_one_easy *easy)
-{
-  curl_socket_t socks[MAX_SOCKSPEREASYHANDLE];
-  int i;
-  struct Curl_sh_entry *entry;
-  curl_socket_t s;
-  int num;
-  unsigned int curraction;
-
-  memset(&socks, 0, sizeof(socks));
-  for(i=0; i< MAX_SOCKSPEREASYHANDLE; i++)
-    socks[i] = CURL_SOCKET_BAD;
-
-  /* Fill in the 'current' struct with the state as it is now: what sockets to
-     supervise and for what actions */
-  curraction = multi_getsock(easy, socks, MAX_SOCKSPEREASYHANDLE);
-
-  /* We have 0 .. N sockets already and we get to know about the 0 .. M
-     sockets we should have from now on. Detect the differences, remove no
-     longer supervised ones and add new ones */
-
-  /* walk over the sockets we got right now */
-  for(i=0; (i< MAX_SOCKSPEREASYHANDLE) &&
-        (curraction & (GETSOCK_READSOCK(i) | GETSOCK_WRITESOCK(i)));
-      i++) {
-    int action = CURL_POLL_NONE;
-
-    s = socks[i];
-
-    /* get it from the hash */
-    entry = Curl_hash_pick(multi->sockhash, (char *)&s, sizeof(s));
-
-    if(curraction & GETSOCK_READSOCK(i))
-      action |= CURL_POLL_IN;
-    if(curraction & GETSOCK_WRITESOCK(i))
-      action |= CURL_POLL_OUT;
-
-    if(entry) {
-      /* yeps, already present so check if it has the same action set */
-      if(entry->action == action)
-        /* same, continue */
-        continue;
-    }
-    else {
-      /* this is a socket we didn't have before, add it! */
-      entry = sh_addentry(multi->sockhash, s, easy->easy_handle);
-      if(!entry)
-        /* fatal */
-        return;
-    }
-
-    multi->socket_cb(easy->easy_handle,
-                     s,
-                     action,
-                     multi->socket_userp,
-                     entry ? entry->socketp : NULL);
-
-    entry->action = action; /* store the current action state */
-  }
-
-  num = i; /* number of sockets */
-
-  /* when we've walked over all the sockets we should have right now, we must
-     make sure to detect sockets that are removed */
-  for(i=0; i< easy->numsocks; i++) {
-    int j;
-    s = easy->sockets[i];
-    for(j=0; j<num; j++) {
-      if(s == socks[j]) {
-        /* this is still supervised */
-        s = CURL_SOCKET_BAD;
-        break;
-      }
-    }
-    if(s != CURL_SOCKET_BAD) {
-      /* this socket has been removed. Remove it */
-
-      entry = Curl_hash_pick(multi->sockhash, (char *)&s, sizeof(s));
-      if(entry) {
-        /* just a precaution, this socket really SHOULD be in the hash already
-           but in case it isn't, we don't have to tell the app to remove it
-           either since it never got to know about it */
-        multi->socket_cb(easy->easy_handle,
-                         s,
-                         CURL_POLL_REMOVE,
-                         multi->socket_userp,
-                         entry ? entry->socketp : NULL);
-
-        sh_delentry(multi->sockhash, s);
-      }
-    }
-  }
-
-  memcpy(easy->sockets, socks, num*sizeof(curl_socket_t));
-  easy->numsocks = num;
-}
-
-static CURLMcode multi_socket(struct Curl_multi *multi,
-                              bool checkall,
-                              curl_socket_t s,
-                              int *running_handles)
-{
-  CURLMcode result = CURLM_OK;
-  struct SessionHandle *data = NULL;
-  struct Curl_tree *t;
-
-  if(checkall) {
-    struct Curl_one_easy *easyp;
-    /* *perform() deals with running_handles on its own */
-    result = curl_multi_perform(multi, running_handles);
-
-    /* walk through each easy handle and do the socket state change magic
-       and callbacks */
-    easyp=multi->easy.next;
-    while(easyp) {
-      singlesocket(multi, easyp);
-      easyp = easyp->next;
-    }
-
-    /* or should we fall-through and do the timer-based stuff? */
-    return result;
-  }
-  else if (s != CURL_SOCKET_TIMEOUT) {
-
-    struct Curl_sh_entry *entry =
-      Curl_hash_pick(multi->sockhash, (char *)&s, sizeof(s));
-
-    if(!entry)
-      /* unmatched socket, a major problem! */
-      return CURLM_BAD_SOCKET; /* better return code? */
-
-    data = entry->easy;
-
-    if(data->magic != CURLEASY_MAGIC_NUMBER)
-      /* the magic number check failed - this is not a live easy handle */
-      return CURLM_INTERNAL_ERROR;
-
-    result = multi_runsingle(multi, data->set.one_easy);
-
-    if(result == CURLM_OK)
-      /* get the socket(s) and check if the state has been changed since
-         last */
-      singlesocket(multi, data->set.one_easy);
-
-    /* Now we fall-through and do the timer-based stuff, since we don't want
-       to force the user to have to deal with timeouts as long as at least one
-       connection in fact has traffic. */
-
-    data = NULL; /* set data to NULL again to avoid calling multi_runsingle()
-                    in case there's no need to */
-  }
-
-  /*
-   * The loop following here will go on as long as there are expire-times left
-   * to process in the splay and 'data' will be re-assigned for every expired
-   * handle we deal with.
-   */
-  do {
-    int key;
-    struct timeval now;
-
-    /* on the first loop lap 'data' can be NULL */
-    if(data) {
-      result = multi_runsingle(multi, data->set.one_easy);
-
-      if(result == CURLM_OK)
-        /* get the socket(s) and check if the state has been changed since
-           last */
-        singlesocket(multi, data->set.one_easy);
-    }
-
-    /* Check if there's one (more) expired timer to deal with! This function
-       extracts a matching node if there is one */
-
-    now = Curl_tvnow();
-    key = now.tv_sec; /* drop the usec part */
-
-    multi->timetree = Curl_splaygetbest(key, multi->timetree, &t);
-    if(t) {
-      /* assign 'data' to be the easy handle we just removed from the splay
-         tree */
-      data = t->payload;
-      /* clear the expire time within the handle we removed from the
-         splay tree */
-      data->state.expiretime.tv_sec = 0;
-      data->state.expiretime.tv_usec = 0;
-    }
-
-  } while(t);
-
-  *running_handles = multi->num_alive;
-  return result;
-}
-
-CURLMcode curl_multi_setopt(CURLM *multi_handle,
-                            CURLMoption option, ...)
-{
-  struct Curl_multi *multi=(struct Curl_multi *)multi_handle;
-  CURLMcode res = CURLM_OK;
-  va_list param;
-
-  if(!GOOD_MULTI_HANDLE(multi))
-    return CURLM_BAD_HANDLE;
-
-  va_start(param, option);
-
-  switch(option) {
-  case CURLMOPT_SOCKETFUNCTION:
-    multi->socket_cb = va_arg(param, curl_socket_callback);
-    break;
-  case CURLMOPT_SOCKETDATA:
-    multi->socket_userp = va_arg(param, void *);
-    break;
-  case CURLMOPT_PIPELINING:
-    multi->pipelining_enabled = (bool)(0 != va_arg(param, long));
-    break;
-  case CURLMOPT_TIMERFUNCTION:
-    multi->timer_cb = va_arg(param, curl_multi_timer_callback);
-    break;
-  case CURLMOPT_TIMERDATA:
-    multi->timer_userp = va_arg(param, void *);
-    break;
-  default:
-    res = CURLM_UNKNOWN_OPTION;
-    break;
-  }
-  va_end(param);
-  return res;
-}
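-
-/* Usage sketch (public libcurl API): installing the two callbacks that the
-   socket interface relies on. The callback bodies here are placeholders for
-   an application's event loop and timer. */
-#include <curl/curl.h>
-
-static int on_socket(CURL *easy, curl_socket_t s, int what,
-                     void *userp, void *socketp)
-{
-  /* register/unregister 's' with the event loop according to 'what':
-     CURL_POLL_IN, CURL_POLL_OUT, CURL_POLL_INOUT or CURL_POLL_REMOVE */
-  (void)easy; (void)s; (void)what; (void)userp; (void)socketp;
-  return 0;
-}
-
-static int on_timer(CURLM *multi, long timeout_ms, void *userp)
-{
-  /* (re)arm the application's single timer to fire in 'timeout_ms'
-     milliseconds; a negative value means the timer can be disabled */
-  (void)multi; (void)timeout_ms; (void)userp;
-  return 0;
-}
-
-static void install_callbacks(CURLM *multi)
-{
-  curl_multi_setopt(multi, CURLMOPT_SOCKETFUNCTION, on_socket);
-  curl_multi_setopt(multi, CURLMOPT_TIMERFUNCTION, on_timer);
-}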
-
-
-CURLMcode curl_multi_socket_all(CURLM *multi_handle, int *running_handles)
-
-{
-  CURLMcode result = multi_socket((struct Curl_multi *)multi_handle,
-                                  TRUE, CURL_SOCKET_BAD, running_handles);
-  if (CURLM_OK == result)
-    update_timer((struct Curl_multi *)multi_handle);
-  return result;
-}
-
-static CURLMcode multi_timeout(struct Curl_multi *multi,
-                               long *timeout_ms)
-{
-  if(multi->timetree) {
-    /* we have a tree of expire times */
-    struct timeval now = Curl_tvnow();
-
-    /* splay the lowest to the bottom */
-    multi->timetree = Curl_splay(0, multi->timetree);
-
-    /* At least currently, the splay key is a time_t for the expire time */
-    *timeout_ms = (multi->timetree->key - now.tv_sec) * 1000 -
-      now.tv_usec/1000;
-    if(*timeout_ms < 0)
-      /* 0 means immediately */
-      *timeout_ms = 0;
-  }
-  else
-    *timeout_ms = -1;
-
-  return CURLM_OK;
-}
-
-CURLMcode curl_multi_timeout(CURLM *multi_handle,
-                             long *timeout_ms)
-{
-  struct Curl_multi *multi=(struct Curl_multi *)multi_handle;
-
-  /* First, make some basic checks that the CURLM handle is a good handle */
-  if(!GOOD_MULTI_HANDLE(multi))
-    return CURLM_BAD_HANDLE;
-
-  return multi_timeout(multi, timeout_ms);
-}
-
-/*
- * Tell the application it should update its timers, if it subscribes to the
- * update timer callback.
- */
-static int update_timer(struct Curl_multi *multi)
-{
-  long timeout_ms;
-  if (!multi->timer_cb)
-    return 0;
-  if ( multi_timeout(multi, &timeout_ms) != CURLM_OK )
-    return -1;
-  if ( timeout_ms < 0 )
-    return 0;
-
-  /* When multi_timeout() is done, multi->timetree points to the node whose
-   * (relative) timeout we just computed. We can thus easily check whether it
-   * is the same (absolute) expire time as in a previous call and, if so,
-   * avoid calling the callback again. */
-  if(multi->timetree->key == multi->timer_lastcall)
-    return 0;
-
-  multi->timer_lastcall = multi->timetree->key;
-
-  return multi->timer_cb((CURLM*)multi, timeout_ms, multi->timer_userp);
-}
-
-/* Given a number of milliseconds from now, set the 'act before this' time
-   for the transfer, to be extracted later by curl_multi_timeout() */
-void Curl_expire(struct SessionHandle *data, long milli)
-{
-  struct Curl_multi *multi = data->multi;
-  struct timeval *nowp = &data->state.expiretime;
-  int rc;
-
-  /* this is only interesting for multi-interface using libcurl, and only
-     while there is still a multi interface struct remaining! */
-  if(!multi)
-    return;
-
-  if(!milli) {
-    /* No timeout, clear the time data. */
-    if(nowp->tv_sec) {
-      /* Since this is a cleared time, we must remove the previous entry from
-         the splay tree */
-      rc = Curl_splayremovebyaddr(multi->timetree,
-                                  &data->state.timenode,
-                                  &multi->timetree);
-      if(rc)
-        infof(data, "Internal error clearing splay node = %d\n", rc);
-      infof(data, "Expire cleared\n");
-      nowp->tv_sec = 0;
-      nowp->tv_usec = 0;
-    }
-  }
-  else {
-    struct timeval set;
-    int rest;
-
-    set = Curl_tvnow();
-    set.tv_sec += milli/1000;
-    set.tv_usec += (milli%1000)*1000;
-
-    rest = (int)(set.tv_usec - 1000000);
-    if(rest > 0) {
-      /* more than a full second's worth of microseconds */
-      set.tv_sec++;
-      set.tv_usec -= 1000000;
-    }
-
-    if(nowp->tv_sec) {
-      /* This means the struct is already added as a node in the splay tree.
-         Check whether the new time is earlier, and only remove-old/add-new
-         if it is. */
-      long diff = curlx_tvdiff(set, *nowp);
-      if(diff > 0)
-        /* the new expire time was later so we don't change this */
-        return;
-
-      /* Since this is an updated time, we must remove the previous entry from
-         the splay tree first and then re-add the new value */
-      rc = Curl_splayremovebyaddr(multi->timetree,
-                                  &data->state.timenode,
-                                  &multi->timetree);
-      if(rc)
-        infof(data, "Internal error removing splay node = %d\n", rc);
-    }
-
-    *nowp = set;
-#if 0
-    infof(data, "Expire at %ld / %ld (%ldms)\n",
-          (long)nowp->tv_sec, (long)nowp->tv_usec, milli);
-#endif
-    data->state.timenode.payload = data;
-    multi->timetree = Curl_splayinsert((int)nowp->tv_sec,
-                                       multi->timetree,
-                                       &data->state.timenode);
-  }
-#if 0
-  Curl_splayprint(multi->timetree, 0, TRUE);
-#endif
-}
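-
-/* Standalone sketch of the timeval arithmetic above: add 'milli'
-   milliseconds to a timestamp and normalize so tv_usec stays below one
-   second (this version uses >= so the exact one-second boundary is carried
-   as well). */
-#include <sys/time.h>
-
-static struct timeval add_millis(struct timeval now, long milli)
-{
-  now.tv_sec += milli / 1000;
-  now.tv_usec += (milli % 1000) * 1000;
-
-  if(now.tv_usec >= 1000000) {  /* carried past a full second */
-    now.tv_sec++;
-    now.tv_usec -= 1000000;
-  }
-  return now;
-}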
-
-CURLMcode curl_multi_assign(CURLM *multi_handle,
-                            curl_socket_t s, void *hashp)
-{
-  struct Curl_sh_entry *there = NULL;
-  struct Curl_multi *multi = (struct Curl_multi *)multi_handle;
-
-  if(s != CURL_SOCKET_BAD)
-    there = Curl_hash_pick(multi->sockhash, (char *)&s, sizeof(curl_socket_t));
-
-  if(!there)
-    return CURLM_BAD_SOCKET;
-
-  there->socketp = hashp;
-
-  return CURLM_OK;
-}
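-
-/* Usage sketch (public libcurl API): a socket callback typically allocates
-   a per-socket struct the first time it sees a socket and stores it with
-   curl_multi_assign(); on later events for the same socket it comes back as
-   'socketp'. The 'sockstate' type is hypothetical, and CURLMOPT_SOCKETDATA
-   is assumed to have been set to the multi handle. */
-#include <stdlib.h>
-#include <curl/curl.h>
-
-struct sockstate { int events; };  /* hypothetical per-socket bookkeeping */
-
-static int on_socket_event(CURL *easy, curl_socket_t s, int what,
-                           void *userp, void *socketp)
-{
-  CURLM *multi = (CURLM *)userp;
-  struct sockstate *st = (struct sockstate *)socketp;
-
-  if(what == CURL_POLL_REMOVE) {
-    free(st);                            /* the socket is going away */
-  }
-  else {
-    if(!st) {
-      st = calloc(1, sizeof(*st));
-      if(st)
-        curl_multi_assign(multi, s, st); /* stash it in the sockhash entry */
-    }
-    if(st)
-      st->events = what;                 /* CURL_POLL_IN and/or _OUT */
-  }
-  (void)easy;
-  return 0;
-}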
-
-static bool multi_conn_using(struct Curl_multi *multi,
-                             struct SessionHandle *data)
-{
-  /* any live CLOSEACTION-connections pointing to the given 'data' ? */
-  int i;
-
-  for(i=0; i< multi->connc->num; i++) {
-    if(multi->connc->connects[i] &&
-       (multi->connc->connects[i]->data == data) &&
-       multi->connc->connects[i]->protocol & PROT_CLOSEACTION)
-      return TRUE;
-  }
-
-  return FALSE;
-}
-
-/* Add the given data pointer to the list of 'closure handles' that are kept
-   around only to be able to close some connections nicely - just make sure
-   that this handle isn't already added, like for the cases when an easy
-   handle is removed, added and removed again... */
-static void add_closure(struct Curl_multi *multi,
-                        struct SessionHandle *data)
-{
-  int i;
-  struct closure *cl = (struct closure *)calloc(1, sizeof(struct closure));
-  struct closure *p=NULL;
-  struct closure *n;
-  if(cl) {
-    cl->easy_handle = data;
-    cl->next = multi->closure;
-    multi->closure = cl;
-  }
-
-  p = multi->closure;
-  if(!p)
-    return; /* the calloc failed and the list is empty, nothing to scan */
-
-  cl = p->next; /* start immediately on the second entry, since the first is
-                   the one we just added and it is _very_ likely to still be
-                   in use in the cache - that's the whole purpose of adding
-                   it to this list! */
-
-  /* When adding, scan through all the other currently kept handles and see if
-     there are any connections still referring to them and kill them if not. */
-  while(cl) {
-    bool inuse = FALSE;
-    for(i=0; i< multi->connc->num; i++) {
-      if(multi->connc->connects[i] &&
-         (multi->connc->connects[i]->data == cl->easy_handle)) {
-        inuse = TRUE;
-        break;
-      }
-    }
-
-    n = cl->next;
-
-    if(!inuse) {
-      /* cl->easy_handle is now killable */
-      infof(data, "Delayed kill of easy handle %p\n", cl->easy_handle);
-      /* mark it as no longer having a connection around that uses it */
-      cl->easy_handle->state.shared_conn = NULL;
-      Curl_close(cl->easy_handle);
-      if(p)
-        p->next = n;
-      else
-        multi->closure = n;
-      free(cl);
-    }
-    else
-      p = cl;
-
-    cl = n;
-  }
-
-}
-
-#ifdef CURLDEBUG
-void curl_multi_dump(CURLM *multi_handle)
-{
-  struct Curl_multi *multi=(struct Curl_multi *)multi_handle;
-  struct Curl_one_easy *easy;
-  int i;
-  fprintf(stderr, "* Multi status: %d handles, %d alive\n",
-          multi->num_easy, multi->num_alive);
-  for(easy=multi->easy.next; easy; easy = easy->next) {
-    if(easy->state != CURLM_STATE_COMPLETED) {
-      /* only display handles that are not completed */
-      fprintf(stderr, "handle %p, state %s, %d sockets\n",
-              (void *)easy->easy_handle,
-              statename[easy->state], easy->numsocks);
-      for(i=0; i < easy->numsocks; i++) {
-        curl_socket_t s = easy->sockets[i];
-        struct Curl_sh_entry *entry =
-          Curl_hash_pick(multi->sockhash, (char *)&s, sizeof(s));
-
-        fprintf(stderr, "%d ", (int)s);
-        if(!entry) {
-          fprintf(stderr, "INTERNAL CONFUSION\n");
-          continue;
-        }
-        fprintf(stderr, "[%s %s] ",
-                entry->action&CURL_POLL_IN?"RECVING":"",
-                entry->action&CURL_POLL_OUT?"SENDING":"");
-      }
-      if(easy->numsocks)
-        fprintf(stderr, "\n");
-    }
-  }
-}
-#endif
diff --git a/exsrc/src/h5diff_correct_ansi.c b/exsrc/src/h5diff_correct_ansi.c
deleted file mode 100644
index a15e3ff278bf2966f3691fb32865db821661bc5f..0000000000000000000000000000000000000000
--- a/exsrc/src/h5diff_correct_ansi.c
+++ /dev/null
@@ -1,2222 +0,0 @@
-/* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
- * Copyright by The HDF Group.                                               *
- * Copyright by the Board of Trustees of the University of Illinois.         *
- * All rights reserved.                                                      *
- *                                                                           *
- * This file is part of HDF5.  The full HDF5 copyright notice, including     *
- * terms governing use, modification, and redistribution, is contained in    *
- * the files COPYING and Copyright.html.  COPYING can be found at the root   *
- * of the source code distribution tree; Copyright.html can be found at the  *
- * root level of an installed copy of the electronic HDF5 document set and   *
- * is linked from the top-level documents page.  It can also be found at     *
- * http://hdfgroup.org/HDF5/doc/Copyright.html.  If you do not have          *
- * access to either file, you may request a copy from help@hdfgroup.org.     *
- * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */
-
-#include <stdlib.h>
-
-#include "H5private.h"
-#include "h5tools.h"
-#include "h5tools_utils.h"
-#include "h5diff.h"
-#include "ph5diff.h"
-
-/*
- * Debug printf macros. The prefix allows output filtering by test scripts.
- */
-#ifdef H5DIFF_DEBUG
-#define h5diffdebug(x) fprintf(stderr, "h5diff debug: " x)
-#define h5diffdebug2(x1, x2) fprintf(stderr, "h5diff debug: " x1, x2)
-#define h5diffdebug3(x1, x2, x3) fprintf(stderr, "h5diff debug: " x1, x2, x3)
-#define h5diffdebug4(x1, x2, x3, x4) fprintf(stderr, "h5diff debug: " x1, x2, x3, x4)
-#define h5diffdebug5(x1, x2, x3, x4, x5) fprintf(stderr, "h5diff debug: " x1, x2, x3, x4, x5)
-#else
-#define h5diffdebug(x)
-#define h5diffdebug2(x1, x2)
-#define h5diffdebug3(x1, x2, x3)
-#define h5diffdebug4(x1, x2, x3, x4)
-#define h5diffdebug5(x1, x2, x3, x4, x5)
-#endif
-
-
-/*-------------------------------------------------------------------------
- * Function: print_objname
- *
 * Purpose: check if the object name is to be printed, which happens only:
 *  1) in verbose mode, or
 *  2) when a diff was found (normal mode)
- *-------------------------------------------------------------------------
- */
-int print_objname (diff_opt_t * options, hsize_t nfound)
-{
-    return ((options->m_verbose || nfound) && !options->m_quiet) ? 1 : 0;
-}
-
-/*-------------------------------------------------------------------------
- * Function: do_print_objname
- *
- * Purpose: print object name
- *
- *-------------------------------------------------------------------------
- */
-void do_print_objname (const char *OBJ, const char *path1, const char *path2, diff_opt_t * opts)
-{
-    /* if the verbose level is higher than 0, print a blank line before
-     * displaying any object or symbolic link. This improves the
-     * readability of the output.
-     */
-    if (opts->m_verbose_level >= 1)
-        parallel_print("\n");
-    parallel_print("%-7s: <%s> and <%s>\n", OBJ, path1, path2);
-}
-
-/*-------------------------------------------------------------------------
- * Function: do_print_attrname
- *
- * Purpose: print attribute name
- *
- *-------------------------------------------------------------------------
- */
-void
-do_print_attrname (const char *attr, const char *path1, const char *path2)
-{
-    parallel_print("%-7s: <%s> and <%s>\n", attr, path1, path2);
-}
-
-/*-------------------------------------------------------------------------
- * Function: print_warn
- *
- * Purpose: check print warning condition.
- * Return:
- *    1 if verbose mode
- *    0 if not verbose mode
- * Programmer: Jonathan Kim
- * Date: Feb 4, 2010
- *-------------------------------------------------------------------------
- */
-static int print_warn(diff_opt_t *options)
-{
-    return options->m_verbose ? 1 : 0;
-}
-
-
-#ifdef H5_HAVE_PARALLEL
-/*-------------------------------------------------------------------------
- * Function: phdiff_dismiss_workers
- *
- * Purpose: tell all workers to end.
- *
- * Return: none
- *
- * Programmer: Albert Cheng
- *
- * Date: Feb 6, 2005
- *
- *-------------------------------------------------------------------------
- */
-void phdiff_dismiss_workers(void)
-{
-    int i;
-    for(i=1; i<g_nTasks; i++)
-        MPI_Send(NULL, 0, MPI_BYTE, i, MPI_TAG_END, MPI_COMM_WORLD);
-}
-
-
-/*-------------------------------------------------------------------------
- * Function: print_manager_output
- *
- * Purpose: special function that prints any output accumulated by the
- *      manager task.
- *
- * Return: none
- *
- * Programmer: Leon Arber
- *
- * Date: Feb 7, 2005
- *
- *-------------------------------------------------------------------------
- */
-void print_manager_output(void)
-{
-    /* If there was something we buffered, let's print it now */
-    if( (outBuffOffset>0) && g_Parallel)
-    {
-        printf("%s", outBuff);
-
-        if(overflow_file)
-        {
-            int     tmp;
-            rewind(overflow_file);
-            while((tmp = getc(overflow_file)) >= 0)
-                putchar(tmp);
-            fclose(overflow_file);
-            overflow_file = NULL;
-        }
-
-        fflush(stdout);
-        memset(outBuff, 0, OUTBUFF_SIZE);
-        outBuffOffset = 0;
-    }
-    else if( (outBuffOffset>0) && !g_Parallel)
-    {
-        fprintf(stderr, "h5diff error: outBuffOffset>0, but we're not in parallel!\n");
-    }
-}
-
-/*-------------------------------------------------------------------------
- * Function: print_incoming_data
- *
- * Purpose: special function that prints any output that has been sent to the manager
- *      and is currently sitting in the incoming message queue
- *
- * Return: none
- *
- * Programmer: Leon Arber
- *
- * Date: March 7, 2005
- *
- *-------------------------------------------------------------------------
- */
-
-static void print_incoming_data(void)
-{
-    char data[PRINT_DATA_MAX_SIZE+1];
-    int  incomingMessage;
-    MPI_Status Status;
-
-    do
-    {
-        MPI_Iprobe(MPI_ANY_SOURCE, MPI_TAG_PRINT_DATA, MPI_COMM_WORLD, &incomingMessage, &Status);
-        if(incomingMessage)
-        {
-            memset(data, 0, PRINT_DATA_MAX_SIZE+1);
-            MPI_Recv(data, PRINT_DATA_MAX_SIZE, MPI_CHAR, Status.MPI_SOURCE, MPI_TAG_PRINT_DATA, MPI_COMM_WORLD, &Status);
-
-            printf("%s", data);
-        }
-    } while(incomingMessage);
-}
-#endif
-
-/*-------------------------------------------------------------------------
- * Function: is_valid_options
- *
- * Purpose: check if options are valid
- *
- * Return: 
- *  1 : Valid
- *  0 : Not valid
- *
- * Programmer: Jonathan Kim
- *
- * Date: Feb 17, 2010
- *
- *------------------------------------------------------------------------*/
-static int is_valid_options(diff_opt_t *options)
-{
-    int ret=1; /* init to valid */
-
-    /*-----------------------------------------------
-     * no -q (quiet) with -v (verbose) or -r (report) */
-    if(options->m_quiet && (options->m_verbose || options->m_report))
-    {
-        parallel_print("Error: -q (quiet mode) cannot be added to verbose or report modes\n");
-        options->err_stat=1;
-        ret = 0;
-        goto out;
-    }
-
-    /* -------------------------------------------------------
-     * only allow --no-dangling-links along with --follow-symlinks */
-    if(options->no_dangle_links && !options->follow_links)
-    {
-        parallel_print("Error: --no-dangling-links must be used along with --follow-symlinks option.\n");
-        options->err_stat=1;
-        ret = 0;
-        goto out;
-    }
-
-out:
-
-    return ret;
-}
-
-/*-------------------------------------------------------------------------
- * Function: is_exclude_path
- *
- * Purpose: check if 'path' is part of the exclude path list
- *
- * Return:  
- *   1 - excluded path
- *   0 - not excluded path
- * 
- * Programmer: Jonathan Kim
- * Date: Aug 23, 2010
- *------------------------------------------------------------------------*/
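-/*
- * Illustrative example (an editorial sketch, not tool output): with
- * "--exclude-path /grp1" given, the checks below behave as follows:
- *
- *   /grp1        -> excluded (exact match)
- *   /grp1/dset1  -> excluded (member of the excluded group)
- *   /grp1xxx     -> not excluded (prefix matches, but the character
- *                   after "/grp1" is 'x', not '/')
- */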
-static int is_exclude_path (char * path, h5trav_type_t type, diff_opt_t *options)
-{
-    struct exclude_path_list * exclude_path_ptr;
-    int ret_cmp;
-    int ret = 0;
-    int len_grp;
-
-    /* check if exclude path option is given */
-    if (!options->exclude_path)
-        goto out;
-
-    /* assign to local exclude list pointer */
-    exclude_path_ptr = options->exclude;
-
-    /* search objects in exclude list */
-    while (NULL != exclude_path_ptr)
-    {
-        /* if given object is group, exclude its members as well */
-        if (exclude_path_ptr->obj_type == H5TRAV_TYPE_GROUP)
-        {
-            ret_cmp = HDstrncmp(exclude_path_ptr->obj_path, path,
-                                strlen(exclude_path_ptr->obj_path));
-            if (ret_cmp == 0)
-            {
-                /* check if the given path belongs to the excluded group;
-                 * if so, exclude it as well. This verifies that
-                 * "/grp1/dset1" is under "/grp1" but not under a
-                 * "/grp1xxx/" group.
-                 */
-                len_grp = HDstrlen(exclude_path_ptr->obj_path);
-                if (path[len_grp] == '/')
-                {
-                    /* belongs to the excluded group */
-                    ret = 1;
-                    break;  /* while */
-                }
-            }
-        }
-        /* exclude target is not group, just exclude the object */
-        else  
-        {
-            ret_cmp = HDstrcmp(exclude_path_ptr->obj_path, path);
-            if (ret_cmp == 0)
-            {
-                /* excluded non-group object */
-                ret = 1;
-                /* record the type as the scan progresses; this is
-                 * sufficient for the group-exclusion check above. */
-                exclude_path_ptr->obj_type = type;
-                break; /* while */
-            }
-        }
-        exclude_path_ptr = exclude_path_ptr->next;
-    }
-
-out:
-    return  ret;
-}
-
-
-/*-------------------------------------------------------------------------
- * Function: free_exclude_path_list
- *
- * Purpose: free the exclude path list from the diff options
- *
- * Programmer: Jonathan Kim
- * Date: Aug 23, 2010
- *------------------------------------------------------------------------*/
-static void free_exclude_path_list(diff_opt_t *options)
-{
-    struct exclude_path_list * curr = options->exclude;
-    struct exclude_path_list * next;
-
-    while (NULL != curr)
-    {
-        next = curr->next;
-        HDfree(curr);
-        curr = next;
-    }
-}
-
-/*-------------------------------------------------------------------------
- * Function: build_match_list
- *
- * Purpose: get list of matching path_name from info1 and info2
- *
- * Note:
- *  Find common objects; the algorithm used for this search is the
- *  cosequential match algorithm and is described in
- *  Folk, Michael; Zoellick, Bill. (1992). File Structures. Addison-Wesley.
- *  Moved out from diff_match() to make code more flexible.
- *
- * Parameter:
- *  table_out [OUT] : return the list
- *
- * Programmer: Jonathan Kim
- *
- * Date: Aug 18, 2010
- *------------------------------------------------------------------------*/
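-/*
- * Sketch of the cosequential match over the two sorted path lists
- * (illustrative pseudocode; the real loop below also handles
- * exclusions and same-target detection):
- *
- *   while (curr1 < n1 && curr2 < n2) {
- *       cmp = strcmp(path1[curr1], path2[curr2]);
- *       if (cmp == 0)      { add to both;  curr1++; curr2++; }
- *       else if (cmp < 0)  { add to file1; curr1++; }
- *       else               { add to file2; curr2++; }
- *   }
- *   then drain whichever list did not end
- */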
-static void build_match_list (const char *objname1, trav_info_t *info1, const char *objname2, trav_info_t *info2, trav_table_t ** table_out, diff_opt_t *options)
-{
-    unsigned i;
-    size_t curr1 = 0;
-    size_t curr2 = 0;
-    unsigned infile[2];
-    char * path1_lp;
-    char * path2_lp;
-    h5trav_type_t type1_l;
-    h5trav_type_t type2_l;
-    int path1_offset = 0;
-    int path2_offset = 0;
-    int cmp;
-    trav_table_t *table;
-    size_t  idx;
-
-    /* init */
-    trav_table_init( &table );
-
-    /*
-     * This is necessary for the case where the given objects are groups
-     * with different names (e.g., obj1 is /grp1 and obj2 is /grp5).
-     * All objects belonging to the given groups are candidates,
-     * so prepare to compare paths without the group names.
-     */
-    /* if obj1 is not root */
-    if (HDstrcmp (objname1,"/") != 0)
-        path1_offset = HDstrlen(objname1);
-    /* if obj2 is not root */
-    if (HDstrcmp (objname2,"/") != 0)
-        path2_offset = HDstrlen(objname2);
-
-    /*--------------------------------------------------
-    * build the list
-    */
-    while(curr1 < info1->nused && curr2 < info2->nused)
-    {
-        
-        path1_lp = (info1->paths[curr1].path) + path1_offset;
-        path2_lp = (info2->paths[curr2].path) + path2_offset;
-        type1_l = info1->paths[curr1].type;
-        type2_l = info2->paths[curr2].type;
-        
-        /* criteria is string compare */
-        cmp = HDstrcmp(path1_lp, path2_lp);
-
-        if(cmp == 0) {
-            if(!is_exclude_path(path1_lp, type1_l, options))
-            {
-                infile[0] = 1;
-                infile[1] = 1;
-                trav_table_addflags(infile, path1_lp, info1->paths[curr1].type, table);
-                /* if the two point to the same target object,
-                 * mark that in table */
-                if (info1->paths[curr1].fileno == info2->paths[curr2].fileno &&
-                    info1->paths[curr1].objno == info2->paths[curr2].objno )
-                {
-                    idx = table->nobjs - 1;
-                    table->objs[idx].is_same_trgobj = 1;
-                }
-            }
-            curr1++;
-            curr2++;
-        } /* end if */
-        else if(cmp < 0)
-        {
-            if(!is_exclude_path(path1_lp, type1_l, options))
-            {
-                infile[0] = 1;
-                infile[1] = 0;
-                trav_table_addflags(infile, path1_lp, info1->paths[curr1].type, table);
-            }
-            curr1++;
-        } /* end else-if */
-        else
-        {
-            if (!is_exclude_path(path2_lp, type2_l, options))
-            {
-                infile[0] = 0;
-                infile[1] = 1;
-                trav_table_addflags(infile, path2_lp, info2->paths[curr2].type, table);
-            }
-            curr2++;
-        } /* end else */
-    } /* end while */
-
-    /* list1 did not end */
-    infile[0] = 1;
-    infile[1] = 0;
-    while(curr1 < info1->nused)
-    {
-        /* refresh the local path and type before the exclusion check */
-        path1_lp = (info1->paths[curr1].path) + path1_offset;
-        type1_l = info1->paths[curr1].type;
-        if(!is_exclude_path(path1_lp, type1_l, options))
-            trav_table_addflags(infile, path1_lp, info1->paths[curr1].type, table);
-        curr1++;
-    } /* end while */
-
-    /* list2 did not end */
-    infile[0] = 0;
-    infile[1] = 1;
-    while(curr2 < info2->nused)
-    {
-        /* refresh the local path and type before the exclusion check */
-        path2_lp = (info2->paths[curr2].path) + path2_offset;
-        type2_l = info2->paths[curr2].type;
-        if (!is_exclude_path(path2_lp, type2_l, options))
-            trav_table_addflags(infile, path2_lp, info2->paths[curr2].type, table);
-        curr2++;
-    } /* end while */
-
-    free_exclude_path_list (options);
-   /*------------------------------------------------------
-    * print the list
-    */
-    if(options->m_verbose)
-    {
-        parallel_print("\n");
-        /* if the given objects are groups rather than the file roots */
-        if (HDstrcmp (objname1,"/") || HDstrcmp (objname2,"/"))
-            parallel_print("group1   group2\n");
-        else
-            parallel_print("file1     file2\n");
-        parallel_print("---------------------------------------\n");
-        for(i = 0; i < table->nobjs; i++) 
-        {
-            char c1, c2;
-            c1 = (table->objs[i].flags[0]) ? 'x' : ' ';
-            c2 = (table->objs[i].flags[1]) ? 'x' : ' ';
-            parallel_print("%5c %6c    %-15s\n", c1, c2, table->objs[i].name);
-        } /* end for */
-        parallel_print ("\n");
-    } /* end if */
-
-    *table_out = table;
-}
-
-
-/*-------------------------------------------------------------------------
- * Function: trav_grp_objs
- *
- * Purpose: 
- *  Callback function for h5trav_visit().
- *
- * Programmer: Jonathan Kim
- *
- * Date: Aug 16, 2010
- *------------------------------------------------------------------------*/
-static herr_t trav_grp_objs(const char *path, const H5O_info_t *oinfo,
-    const char *already_visited, void *udata)
-{
-    trav_info_visit_obj(path, oinfo, already_visited, udata);
-
-    return 0;
-} 
-
-/*-------------------------------------------------------------------------
- * Function: trav_grp_symlinks
- *
- * Purpose: 
- *  Callback function for h5trav_visit().
- *  Tracks visited links and performs extra checks while visiting
- *  symbolic links.
- *
- * Programmer: Jonathan Kim
- *
- * Date: Aug 16, 2010
- *------------------------------------------------------------------------*/
-static herr_t trav_grp_symlinks(const char *path, const H5L_info_t *linfo, 
-                               void *udata)
-{                               
-    trav_info_t *tinfo = (trav_info_t *)udata;
-    diff_opt_t *opts = (diff_opt_t *)tinfo->opts;
-    int ret;
-    h5tool_link_info_t lnk_info;
-    const char *ext_fname;
-    const char *ext_path;
-
-    /* init linkinfo struct */
-    memset(&lnk_info, 0, sizeof(h5tool_link_info_t));
-
-    if (!opts->follow_links)
-    {
-        trav_info_visit_lnk(path, linfo, tinfo);
-        goto done;
-    }
-
-    switch(linfo->type)
-    {
-    case H5L_TYPE_SOFT:
-        ret = H5tools_get_symlink_info(tinfo->fid, path, &lnk_info, opts->follow_links);
-        /* error */
-        if (ret < 0)
-            goto done;
-        /* dangling link detected; handle per the no-dangling-links option */
-        else if (ret == 0)
-        {
-            tinfo->symlink_visited.dangle_link = TRUE;
-            trav_info_visit_lnk(path, linfo, tinfo);
-            if (opts->no_dangle_links)
-                opts->err_stat = 1; /* treat a dangling link as an error */
-            goto done;
-        }
-
-        /* check if the target object has already been visited */
-        if(symlink_is_visited( &(tinfo->symlink_visited), linfo->type, NULL, lnk_info.trg_path)) 
-            goto done;
-
-        /* add this link as visited link */
-        if(symlink_visit_add( &(tinfo->symlink_visited), linfo->type, NULL, lnk_info.trg_path) < 0) 
-            goto done;
-                
-        if(h5trav_visit(tinfo->fid, path, TRUE, TRUE,
-                     trav_grp_objs,trav_grp_symlinks, tinfo) < 0)
-        {
-            parallel_print("Error: Could not get file contents\n");
-            opts->err_stat = 1;
-            goto done;
-        }
-        break;
-    
-    case H5L_TYPE_EXTERNAL:    
-        ret = H5tools_get_symlink_info(tinfo->fid, path, &lnk_info, opts->follow_links);
-        /* error */
-        if (ret < 0)
-            goto done;
-        /* dangling link detected; handle per the no-dangling-links option */
-        else if (ret == 0)
-        {
-            tinfo->symlink_visited.dangle_link = TRUE;
-            trav_info_visit_lnk(path, linfo, tinfo);
-            if (opts->no_dangle_links)
-                opts->err_stat = 1; /* treat a dangling link as an error */
-            goto done;
-        }
-
-        if(H5Lunpack_elink_val(lnk_info.trg_path, linfo->u.val_size, NULL, &ext_fname, &ext_path) < 0) 
-            goto done;
-
-        /* check if the target object has already been visited */
-        if(symlink_is_visited( &(tinfo->symlink_visited), linfo->type, ext_fname, ext_path)) 
-            goto done;
-
-        /* add this link as visited link */
-        if(symlink_visit_add( &(tinfo->symlink_visited), linfo->type, ext_fname, ext_path) < 0) 
-            goto done;
-                
-        if(h5trav_visit(tinfo->fid, path, TRUE, TRUE,
-                        trav_grp_objs,trav_grp_symlinks, tinfo) < 0)
-        {
-            parallel_print("Error: Could not get file contents\n");
-            opts->err_stat = 1;
-            goto done;
-        }
-        break;
-    default:
-        break;
-    } /* end of switch */
-
-done:    
-    if (lnk_info.trg_path)
-        HDfree(lnk_info.trg_path);
-    return 0;
-}    
-
-
-/*-------------------------------------------------------------------------
- * Function: h5diff
- *
- * Purpose: public function; can be called from an application program.
- *   Returns the number of differences between 2 HDF5 files.
- *
- * Return: Number of differences found.
- *
- * Programmer: Pedro Vicente, pvn@ncsa.uiuc.edu
- *
- * Date: October 22, 2003
- *
- *-------------------------------------------------------------------------
- */
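-/*
- * Minimal usage sketch (illustrative only; field names as used elsewhere
- * in this file, error handling omitted):
- *
- *   diff_opt_t opts;
- *   hsize_t    nfound;
- *
- *   HDmemset(&opts, 0, sizeof(opts));
- *   opts.m_report = 1;
- *   nfound = h5diff("file1.h5", "file2.h5", NULL, NULL, &opts);
- *   (nfound == 0 && !opts.err_stat) means no differences were found
- */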
-hsize_t h5diff(const char *fname1,
-               const char *fname2,
-               const char *objname1,
-               const char *objname2,
-               diff_opt_t *options)
-{
-    hid_t        file1_id = (-1);
-    hid_t        file2_id = (-1);
-    char         filenames[2][MAX_FILENAME];
-    hsize_t      nfound = 0;
-    int i;
-    int l_ret;
-    char * obj1fullname = NULL; /* written with HDstrcpy/HDstrcat and freed */
-    char * obj2fullname = NULL;
-    /* init to group type */
-    h5trav_type_t obj1type = H5TRAV_TYPE_GROUP;
-    h5trav_type_t obj2type = H5TRAV_TYPE_GROUP;
-    /* for single object */
-    H5O_info_t oinfo1, oinfo2; /* object info */
-    trav_info_t  *info1_obj = NULL;
-    trav_info_t  *info2_obj = NULL;
-    /* for group object */
-    trav_info_t  *info1_grp = NULL;
-    trav_info_t  *info2_grp = NULL;
-    /* local pointer */
-    trav_info_t  *info1_lp;
-    trav_info_t  *info2_lp;
-    /* link info from specified object */
-    H5L_info_t src_linfo1;
-    H5L_info_t src_linfo2;
-    /* link info from member object */
-    h5tool_link_info_t trg_linfo1;
-    h5tool_link_info_t trg_linfo2;
-    /* list for common objects */
-    trav_table_t *match_list = NULL;
-
-    /* init filenames */
-    HDmemset(filenames, 0, MAX_FILENAME * 2);
-    /* init link info struct */
-    HDmemset(&trg_linfo1, 0, sizeof(h5tool_link_info_t));
-    HDmemset(&trg_linfo2, 0, sizeof(h5tool_link_info_t));
-
-   /*-------------------------------------------------------------------------
-    * check invalid combination of options
-    *-----------------------------------------------------------------------*/
-    if(!is_valid_options(options))
-        goto out;
-
-    options->cmn_objs = 1; /* eliminate warning */
-
-    /*-------------------------------------------------------------------------
-    * open the files first; if they are not valid, no point in continuing
-    *-------------------------------------------------------------------------
-    */
-
-    /* disable error reporting */
-    H5E_BEGIN_TRY
-    {
-        /* open file 1 */
-        if((file1_id = h5tools_fopen(fname1, H5F_ACC_RDONLY, H5P_DEFAULT, NULL, NULL, (size_t)0)) < 0) 
-        {
-            parallel_print("h5diff: <%s>: unable to open file\n", fname1);
-            options->err_stat = 1;
-            goto out;
-        } /* end if */
-
-
-        /* open file 2 */
-        if((file2_id = h5tools_fopen(fname2, H5F_ACC_RDONLY, H5P_DEFAULT, NULL, NULL, (size_t)0)) < 0) 
-        {
-            parallel_print("h5diff: <%s>: unable to open file\n", fname2);
-            options->err_stat = 1;
-            goto out;
-        } /* end if */
-    /* enable error reporting */
-    } H5E_END_TRY;
-
-    /*-------------------------------------------------------------------------
-    * Initialize the info structs
-    *-------------------------------------------------------------------------
-    */
-    trav_info_init(fname1, file1_id, &info1_obj);
-    trav_info_init(fname2, file2_id, &info2_obj);
-
-    /* if any object is specified */
-    if (objname1)
-    {
-        /* allocate 2 extra chars for a leading "/" and the null terminator */
-        obj1fullname = (char*)HDcalloc(HDstrlen(objname1) + 2, sizeof(char));
-        obj2fullname = (char*)HDcalloc(HDstrlen(objname2) + 2, sizeof(char));
-
-        /* make the given object1 fullpath, start with "/"  */
-        if (HDstrncmp(objname1, "/", 1))
-        {
-            HDstrcpy(obj1fullname, "/");
-            HDstrcat(obj1fullname, objname1);
-        }
-        else
-            HDstrcpy(obj1fullname, objname1);
-
-        /* make the given object2 fullpath, start with "/" */
-        if (HDstrncmp(objname2, "/", 1))
-        {
-            HDstrcpy(obj2fullname, "/");
-            HDstrcat(obj2fullname, objname2);
-        }
-        else
-            HDstrcpy(obj2fullname, objname2);
-
-        /*----------------------------------------------------------
-         * check if obj1 is root, group, single object or symlink
-         */
-        if(!HDstrcmp(obj1fullname, "/"))
-        {
-            obj1type = H5TRAV_TYPE_GROUP;
-        }
-        else
-        {
-            /* check if the link itself exists */
-            if(H5Lexists(file1_id, obj1fullname, H5P_DEFAULT) <= 0) 
-            {
-                parallel_print ("Object <%s> could not be found in <%s>\n", obj1fullname, fname1);
-                options->err_stat = 1;
-                goto out;
-            }
-            /* get info from link */
-            if(H5Lget_info(file1_id, obj1fullname, &src_linfo1, H5P_DEFAULT) < 0) 
-            {
-                parallel_print("Unable to get link info from <%s>\n", obj1fullname);
-                goto out;
-            }
-
-            info1_lp = info1_obj;
-
-            /* 
-             * check the type of specified path for hard and symbolic links
-             */
-            if(src_linfo1.type == H5L_TYPE_HARD)
-            {
-                /* optional data pass */
-                info1_obj->opts = (diff_opt_t*)options;
-
-                if(H5Oget_info_by_name(file1_id, obj1fullname, &oinfo1, H5P_DEFAULT) < 0)
-                {
-                    parallel_print("Error: Could not get file contents\n");
-                    options->err_stat = 1;
-                    goto out;
-                }
-                obj1type = oinfo1.type;
-                trav_info_add(info1_obj, obj1fullname, obj1type);
-            }
-            else if (src_linfo1.type == H5L_TYPE_SOFT)
-            {
-                obj1type = H5TRAV_TYPE_LINK;
-                trav_info_add(info1_obj, obj1fullname, obj1type);
-            }
-            else if (src_linfo1.type == H5L_TYPE_EXTERNAL)
-            {
-                obj1type = H5TRAV_TYPE_UDLINK;
-                trav_info_add(info1_obj, obj1fullname, obj1type);
-            }
-        }
-
-        /*----------------------------------------------------------
-         * check if obj2 is root, group, single object or symlink
-         */
-        if(!HDstrcmp(obj2fullname, "/"))
-        {
-            obj2type = H5TRAV_TYPE_GROUP;
-        }
-        else
-        {
-            /* check if the link itself exists */
-            if(H5Lexists(file2_id, obj2fullname, H5P_DEFAULT) <= 0) 
-            {
-                parallel_print ("Object <%s> could not be found in <%s>\n", obj2fullname, fname2);
-                options->err_stat = 1;
-                goto out;
-            }
-            /* get info from link */
-            if(H5Lget_info(file2_id, obj2fullname, &src_linfo2, H5P_DEFAULT) < 0) 
-            {
-                parallel_print("Unable to get link info from <%s>\n", obj2fullname);
-                goto out;
-            }
-
-            info2_lp = info2_obj;
-
-            /* 
-             * check the type of specified path for hard and symbolic links
-             */
-            if(src_linfo2.type == H5L_TYPE_HARD)
-            {
-                /* optional data pass */
-                info2_obj->opts = (diff_opt_t*)options;
-
-                if(H5Oget_info_by_name(file2_id, obj2fullname, &oinfo2, H5P_DEFAULT) < 0)
-                {
-                    parallel_print("Error: Could not get file contents\n");
-                    options->err_stat = 1;
-                    goto out;
-                }
-                obj2type = oinfo2.type;
-                trav_info_add(info2_obj, obj2fullname, obj2type);
-            }
-            else if (src_linfo2.type == H5L_TYPE_SOFT)
-            {
-                obj2type = H5TRAV_TYPE_LINK;
-                trav_info_add(info2_obj, obj2fullname, obj2type);
-            }
-            else if (src_linfo2.type == H5L_TYPE_EXTERNAL)
-            {
-                obj2type = H5TRAV_TYPE_UDLINK;
-                trav_info_add(info2_obj, obj2fullname, obj2type);
-            }
-        }           
-    }
-    /* if no object specified */
-    else
-    {
-        /* set root group */
-        obj1fullname = (char*)HDcalloc(2, sizeof(char));
-        HDstrcpy(obj1fullname, "/");
-        obj2fullname = (char*)HDcalloc(2, sizeof(char));
-        HDstrcpy(obj2fullname, "/");
-    }
-
-   /* 
-    * If the verbose option is used, we need to traverse the list of
-    * objects in the group to print out object information.
-    * Otherwise, use h5tools_is_obj_same() to improve performance by
-    * skipping the detailed comparison of identical objects.
-    */
-    if(!(options->m_verbose || options->m_report))
-    {
-        if (h5tools_is_obj_same(file1_id,obj1fullname,file2_id,obj2fullname)!=0)
-            goto out;
-    }
-
-    /*---------------------------------------------
-     * check for following symlinks 
-     */
-    if (options->follow_links)
-    {
-        /* pass how to handle printing warnings to the link info option */
-        if(print_warn(options))
-            trg_linfo1.opt.msg_mode = trg_linfo2.opt.msg_mode = 1;
-
-        /*-------------------------------
-         * check symbolic link (object1)
-         */
-        l_ret = H5tools_get_symlink_info(file1_id, obj1fullname, &trg_linfo1, TRUE);
-        /* dangling link */
-        if (l_ret == 0)
-        {
-            if (options->no_dangle_links)
-            {
-                /* dangling link is an error */
-                if(options->m_verbose)
-                    parallel_print("Warning: <%s> is a dangling link.\n", obj1fullname);
-                options->err_stat = 1;
-                goto out;
-            }
-            else
-            {
-                if(options->m_verbose)
-                    parallel_print("obj1 <%s> is a dangling link.\n", obj1fullname);
-                nfound++;
-                print_found(nfound);
-                goto out;
-            }
-        }
-        else if(l_ret < 0) /* fail */
-        {
-            parallel_print ("Object <%s> could not be found in <%s>\n", obj1fullname, fname1);
-            options->err_stat = 1;
-            goto out;
-        }
-        else if(l_ret != 2) /* symbolic link */
-            obj1type = trg_linfo1.trg_type;
-
-        /*-------------------------------
-         * check symbolic link (object2)
-         */
-        l_ret = H5tools_get_symlink_info(file2_id, obj2fullname, &trg_linfo2, TRUE);
-        /* dangling link */
-        if (l_ret == 0)
-        {
-            if (options->no_dangle_links)
-            {
-                /* dangling link is an error */
-                if(options->m_verbose)
-                    parallel_print("Warning: <%s> is a dangling link.\n", obj2fullname);
-                options->err_stat = 1;
-                goto out;
-            }
-            else
-            {
-                if(options->m_verbose)
-                    parallel_print("obj2 <%s> is a dangling link.\n", obj2fullname);
-                nfound++;
-                print_found(nfound);
-                goto out;
-            }
-        }
-        else if(l_ret < 0) /* fail */ 
-        {
-            parallel_print ("Object <%s> could not be found in <%s>\n", obj2fullname, fname2);
-            options->err_stat = 1;
-            goto out;
-        }
-        else if(l_ret != 2)  /* symbolic link */
-            obj2type = trg_linfo2.trg_type;
-    } /* end of if follow symlinks */
-
-
-    /* if both obj1 and obj2 are groups */
-    if (obj1type == H5TRAV_TYPE_GROUP && obj2type == H5TRAV_TYPE_GROUP)
-    {
-
-        /* 
-         * traverse group1 
-         */
-        trav_info_init(fname1, file1_id, &info1_grp);
-        /* optional data pass */
-        info1_grp->opts = (diff_opt_t*)options;
-
-        if(h5trav_visit(file1_id,obj1fullname,TRUE,TRUE,
-                        trav_grp_objs,trav_grp_symlinks, info1_grp) < 0)
-        {
-            parallel_print("Error: Could not get file contents\n");
-            options->err_stat = 1;
-            goto out;
-        }
-        info1_lp = info1_grp;
-
-        /* 
-         * traverse group2 
-         */
-        trav_info_init(fname2, file2_id, &info2_grp);
-        /* optional data pass */
-        info2_grp->opts = (diff_opt_t*)options;
-
-        if(h5trav_visit(file2_id,obj2fullname,TRUE,TRUE,
-                        trav_grp_objs,trav_grp_symlinks, info2_grp) < 0)
-        {
-            parallel_print("Error: Could not get file contents\n");
-            options->err_stat = 1;
-            goto out;
-        } /* end if */
-        info2_lp = info2_grp;
-
-
-#ifdef H5_HAVE_PARALLEL
-        if(g_Parallel)
-        {
-            if((HDstrlen(fname1) > MAX_FILENAME) || 
-               (HDstrlen(fname2) > MAX_FILENAME))
-            {
-                fprintf(stderr, "The parallel diff only supports path names up to %d characters\n", MAX_FILENAME);
-                MPI_Abort(MPI_COMM_WORLD, 0);
-            } /* end if */
-
-            HDstrcpy(filenames[0], fname1);
-            HDstrcpy(filenames[1], fname2);
-
-            /* Alert the worker tasks that there's going to be work. */
-            for(i = 1; i < g_nTasks; i++)
-                MPI_Send(filenames, (MAX_FILENAME * 2), MPI_CHAR, i, MPI_TAG_PARALLEL, MPI_COMM_WORLD);
-        } /* end if */
-#endif
-        build_match_list (obj1fullname, info1_lp, obj2fullname, info2_lp, 
-                         &match_list, options);
-        nfound = diff_match(file1_id, obj1fullname, info1_lp, 
-                            file2_id, obj2fullname, info2_lp, 
-                            match_list, options); 
-    }
-    else
-    {
-#ifdef H5_HAVE_PARALLEL
-        if(g_Parallel)
-            /* Only single object diff, parallel workers won't be needed */
-            phdiff_dismiss_workers();
-#endif
-
-        nfound = diff_compare(file1_id, fname1, obj1fullname, info1_lp,
-                              file2_id, fname2, obj2fullname, info2_lp,
-                              options);
-    }
-
-out:
-#ifdef H5_HAVE_PARALLEL
-    if(g_Parallel)
-        /* All done at this point, let tasks know that they won't be needed */
-        phdiff_dismiss_workers();
-#endif
-    /* free buffers in trav_info structures */
-    if (info1_obj)
-        trav_info_free(info1_obj);
-    if (info2_obj)
-        trav_info_free(info2_obj);
-
-    if (info1_grp)
-        trav_info_free(info1_grp);
-    if (info2_grp)
-        trav_info_free(info2_grp);
-
-    /* free buffers */
-    if (obj1fullname)
-        HDfree(obj1fullname);
-    if (obj2fullname)
-        HDfree(obj2fullname);
-
-    /* free link info buffer */
-    if (trg_linfo1.trg_path)
-        HDfree(trg_linfo1.trg_path);
-    if (trg_linfo2.trg_path)
-        HDfree(trg_linfo2.trg_path);
-
-    /* close */
-    H5E_BEGIN_TRY
-    {
-        H5Fclose(file1_id);
-        H5Fclose(file2_id);
-    } H5E_END_TRY;
-
-    return nfound;
-}
-
-
-
-/*-------------------------------------------------------------------------
- * Function: diff_match
- *
- * Purpose: 
- *  Compare common objects in given groups according to table structure. 
- *  The table structure has flags which can be used to find common objects 
- *  and will be compared. 
- *  Common object means same name (absolute path) objects in both location.
- *
- * Return: Number of differences found
- *
- * Programmer: Pedro Vicente, pvn@ncsa.uiuc.edu
- *
- * Date: May 9, 2003
- *
- * Modifications: Jan 2005 Leon Arber, larber@uiuc.edu
- *    Added support for parallel diffing
- *
- * Pedro Vicente, pvn@hdfgroup.org, Nov 4, 2008
- *    Compare the graph and make h5diff return 1 for difference if
- * 1) the number of objects in file1 is not the same as in file2
- * 2) the graph does not match, i.e same names (absolute path)
- * 3) objects with the same name are not of the same type
- *-------------------------------------------------------------------------
- */
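-/*
- * Example of the graph rules above (an illustrative note): if /dsetA
- * exists only in file1, its two flags differ in the match table, so
- * options->contents is cleared and h5diff reports the files as
- * different even when every common object compares equal.
- */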
-hsize_t diff_match(hid_t file1_id, const char *grp1, trav_info_t *info1,
-                   hid_t file2_id, const char *grp2, trav_info_t *info2,
-                   trav_table_t *table, diff_opt_t *options)
-{
-    hsize_t      nfound = 0;
-    unsigned     i;
-
-    const char * grp1_path = "";
-    const char * grp2_path = "";
-    char * obj1_fullpath = NULL;
-    char * obj2_fullpath = NULL;
-    h5trav_type_t objtype;
-    diff_args_t argdata;
-
-
-    /* 
-     * if not root, the group path is used as a prefix to be prepended to
-     * each object name to form the full path
-     */
-    if (HDstrcmp (grp1, "/"))
-        grp1_path = grp1;
-    if (HDstrcmp (grp2, "/"))
-        grp2_path = grp2;
-
-    /*-------------------------------------------------------------------------
-    * regarding the return value of h5diff (0, no difference in files, 1 difference )
-    * 1) the number of objects in file1 must be the same as in file2
-    * 2) the graph must match, i.e same names (absolute path)
-    * 3) objects with the same name must be of the same type
-    *-------------------------------------------------------------------------
-    */     
-       
-    /* comparing nused is not valid when the --exclude-path option is used */
-    if (!options->exclude_path)
-    {
-        /* number of different objects */
-        if ( info1->nused != info2->nused )
-        {
-            options->contents = 0;
-        }
-    }
-    
-    /* objects in one file and not the other */
-    for( i = 0; i < table->nobjs; i++)
-    {
-        if( table->objs[i].flags[0] != table->objs[i].flags[1] )
-        {
-            options->contents = 0;
-            break;
-        }
-    }
-
-    /* objects with the same name but different HDF5 types.
-     * The match table stores only one type per name, so look the name up
-     * in both traversal lists and compare the types recorded there.
-     * (For non-root groups the relative name may not resolve, in which
-     * case the check is skipped.) */
-    for( i = 0; i < table->nobjs; i++) 
-    {
-        if ( table->objs[i].flags[0] && table->objs[i].flags[1] )
-        {
-            ssize_t idx1 = h5trav_getindex(info1, table->objs[i].name);
-            ssize_t idx2 = h5trav_getindex(info2, table->objs[i].name);
-
-            if ( idx1 >= 0 && idx2 >= 0 &&
-                 info1->paths[idx1].type != info2->paths[idx2].type )
-            {
-                options->contents = 0;
-            }
-        }
-    }
-
-    /*-------------------------------------------------------------------------
-    * do the diff for common objects
-    *-------------------------------------------------------------------------
-    */
-#ifdef H5_HAVE_PARALLEL
-    {
-    char *workerTasks = (char*)HDmalloc((g_nTasks - 1) * sizeof(char));
-    int n;
-    int busyTasks = 0;
-    struct diffs_found nFoundbyWorker;
-    struct diff_mpi_args args;
-    int havePrintToken = 1;
-    MPI_Status Status;
-
-    /* set all tasks as free */
-    HDmemset(workerTasks, 1, (g_nTasks - 1));
-#endif
-
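-    /*
-     * Outline of the manager/worker message tags used below (a summary
-     * of this file's protocol, not an interface change):
-     *   MPI_TAG_ARGS        manager -> worker: a work unit to diff
-     *   MPI_TAG_DONE        worker -> manager: result, nothing to print
-     *   MPI_TAG_TOK_REQUEST worker -> manager: request the print token
-     *   MPI_TAG_PRINT_TOK   manager -> worker: grant the print token
-     *   MPI_TAG_PRINT_DATA  worker -> manager: buffered output to print
-     *   MPI_TAG_TOK_RETURN  worker -> manager: result plus returned token
-     *   MPI_TAG_END         manager -> worker: no more work
-     * Only the current token holder prints, which serializes the output.
-     */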
-    for(i = 0; i < table->nobjs; i++)
-    {
-        if( table->objs[i].flags[0] && table->objs[i].flags[1])
-        {
-            objtype = table->objs[i].type;
-            /* make full path for obj1 */
-            obj1_fullpath = (char*)HDcalloc (strlen(grp1_path) + strlen (table->objs[i].name) + 1, sizeof (char));
-            HDstrcpy(obj1_fullpath, grp1_path);
-            HDstrcat(obj1_fullpath, table->objs[i].name);
-
-            /* make full path for obj2 */
-            obj2_fullpath = (char*)HDcalloc (strlen(grp2_path) + strlen (table->objs[i].name) + 1, sizeof (char));
-            HDstrcpy(obj2_fullpath, grp2_path);
-            HDstrcat(obj2_fullpath, table->objs[i].name);
-
-            /* Set argdata to pass other args into diff() */
-            argdata.type = objtype;
-            argdata.is_same_trgobj = table->objs[i].is_same_trgobj;
-
-            options->cmn_objs = 1;
-            if(!g_Parallel)
-            {
-                nfound += diff(file1_id, obj1_fullpath,
-                               file2_id, obj2_fullpath, 
-                               options, &argdata);
-            } /* end if */
-#ifdef H5_HAVE_PARALLEL
-            else
-            {
-                int workerFound = 0;
-
-                h5diffdebug("beginning of big else block\n");
-                /* We're in parallel mode */
-                /* Since the data type of the diff value is hsize_t, which
-                * can be arbitrarily large such that there is no MPI type
-                * that matches it, the value is passed between processes
-                * as an array of bytes in order to be portable.  But this
-                * may not work in non-homogeneous MPI environments.
-                */
-
-                /* Set up args to pass to worker task. */
-                if(HDstrlen(obj1_fullpath) > 255 || 
-                   HDstrlen(obj2_fullpath) > 255)
-                {
-                    printf("The parallel diff only supports object names up to 255 characters\n");
-                    MPI_Abort(MPI_COMM_WORLD, 0);
-                } /* end if */
-
-                /* set args struct to pass */
-                HDstrcpy(args.name1, obj1_fullpath);
-                HDstrcpy(args.name2, obj2_fullpath);
-                args.options = *options;
-                args.argdata.type = objtype;
-                args.argdata.is_same_trgobj = table->objs[i].is_same_trgobj;
-
-                h5diffdebug2("busyTasks=%d\n", busyTasks);
-                /* if there are any outstanding print requests, let's handle one. */
-                if(busyTasks > 0)
-                {
-                    int incomingMessage;
-
-                    /* check if any tasks freed up, and didn't need to print. */
-                    MPI_Iprobe(MPI_ANY_SOURCE, MPI_TAG_DONE, MPI_COMM_WORLD, &incomingMessage, &Status);
-
-                    /* a worker finished without needing to print */
-                    if(incomingMessage)
-                    {
-                        workerTasks[Status.MPI_SOURCE - 1] = 1;
-                        MPI_Recv(&nFoundbyWorker, sizeof(nFoundbyWorker), MPI_BYTE, Status.MPI_SOURCE, MPI_TAG_DONE, MPI_COMM_WORLD, &Status);
-                        nfound += nFoundbyWorker.nfound;
-                        options->not_cmp = options->not_cmp | nFoundbyWorker.not_cmp;
-                        busyTasks--;
-                    } /* end if */
-
-                    /* check to see if the print token was returned. */
-                    if(!havePrintToken)
-                    {
-                        /* If we don't have the token, someone is probably sending us output */
-                        print_incoming_data();
-
-                        /* check incoming queue for token */
-                        MPI_Iprobe(MPI_ANY_SOURCE, MPI_TAG_TOK_RETURN, MPI_COMM_WORLD, &incomingMessage, &Status);
-
-                        /* incoming token implies free task. */
-                        if(incomingMessage) {
-                            workerTasks[Status.MPI_SOURCE - 1] = 1;
-                            MPI_Recv(&nFoundbyWorker, sizeof(nFoundbyWorker), MPI_BYTE, Status.MPI_SOURCE, MPI_TAG_TOK_RETURN, MPI_COMM_WORLD, &Status);
-                            nfound += nFoundbyWorker.nfound;
-                            options->not_cmp = options->not_cmp | nFoundbyWorker.not_cmp;
-                            busyTasks--;
-                            havePrintToken = 1;
-                        } /* end if */
-                    } /* end if */
-
-                    /* check to see if anyone needs the print token. */
-                    if(havePrintToken)
-                    {
-                        /* check incoming queue for print token requests */
-                        MPI_Iprobe(MPI_ANY_SOURCE, MPI_TAG_TOK_REQUEST, MPI_COMM_WORLD, &incomingMessage, &Status);
-                        if(incomingMessage)
-                        {
-                            MPI_Recv(NULL, 0, MPI_BYTE, Status.MPI_SOURCE, MPI_TAG_TOK_REQUEST, MPI_COMM_WORLD, &Status);
-                            MPI_Send(NULL, 0, MPI_BYTE, Status.MPI_SOURCE, MPI_TAG_PRINT_TOK, MPI_COMM_WORLD);
-                            havePrintToken = 0;
-                        } /* end if */
-                    } /* end if */
-                } /* end if */
-
-                /* check the array of tasks to see which ones are free.
-                * The manager task never does work, so workerTasks[0] really
-                * corresponds to worker task 1. */
-                for(n = 1; (n < g_nTasks) && !workerFound; n++)
-                {
-                    if(workerTasks[n-1])
-                    {
-                        /* send the work-unit args to the first free worker */
-                        MPI_Send(&args, sizeof(args), MPI_BYTE, n, MPI_TAG_ARGS, MPI_COMM_WORLD);
-
-                        /* increment the count of busy worker tasks */
-                        busyTasks++;
-
-                        /* mark worker as busy */
-                        workerTasks[n - 1] = 0;
-                        workerFound = 1;
-                    } /* end if */
-                } /* end for */
-
-                h5diffdebug2("workerfound is %d \n", workerFound);
-                if(!workerFound)
-                {
-                    /* if they were all busy, we've got to wait for one to free
-                     * up before we can move on.  If we don't have the token,
-                     * some task is currently printing so we'll wait for that
-                     * task to return it.
-                     */
-
-                    if(!havePrintToken)
-                    {
-                        while(!havePrintToken)
-                        {
-                            int incomingMessage;
-
-                            print_incoming_data();
-                            MPI_Iprobe(MPI_ANY_SOURCE, MPI_TAG_TOK_RETURN, MPI_COMM_WORLD, &incomingMessage, &Status);
-                            if(incomingMessage)
-                            {
-                                MPI_Recv(&nFoundbyWorker, sizeof(nFoundbyWorker), MPI_BYTE, MPI_ANY_SOURCE, MPI_TAG_TOK_RETURN, MPI_COMM_WORLD, &Status);
-                                havePrintToken = 1;
-                                nfound += nFoundbyWorker.nfound;
-                                options->not_cmp = options->not_cmp | nFoundbyWorker.not_cmp;
-                                /* send this task the work unit. */
-                                MPI_Send(&args, sizeof(args), MPI_BYTE, Status.MPI_SOURCE, MPI_TAG_ARGS, MPI_COMM_WORLD);
-                            } /* end if */
-                        } /* end while */
-                    } /* end if */
-                    /* if we do have the token, check for task to free up, or wait for a task to request it */
-                    else
-                    {
-                        /* But first print all the data in our incoming queue */
-                        print_incoming_data();
-                        MPI_Probe(MPI_ANY_SOURCE, MPI_ANY_TAG, MPI_COMM_WORLD, &Status);
-                        if(Status.MPI_TAG == MPI_TAG_DONE)
-                        {
-                            MPI_Recv(&nFoundbyWorker, sizeof(nFoundbyWorker), MPI_BYTE, Status.MPI_SOURCE, MPI_TAG_DONE, MPI_COMM_WORLD, &Status);
-                            nfound += nFoundbyWorker.nfound;
-                            options->not_cmp = options->not_cmp | nFoundbyWorker.not_cmp;
-                            MPI_Send(&args, sizeof(args), MPI_BYTE, Status.MPI_SOURCE, MPI_TAG_ARGS, MPI_COMM_WORLD);
-                        } /* end if */
-                        else if(Status.MPI_TAG == MPI_TAG_TOK_REQUEST)
-                        {
-                            int incomingMessage;
-
-                            MPI_Recv(NULL, 0, MPI_BYTE, Status.MPI_SOURCE, MPI_TAG_TOK_REQUEST, MPI_COMM_WORLD, &Status);
-                            MPI_Send(NULL, 0, MPI_BYTE, Status.MPI_SOURCE, MPI_TAG_PRINT_TOK, MPI_COMM_WORLD);
-
-                            do
-                            {
-                                MPI_Iprobe(MPI_ANY_SOURCE, MPI_TAG_TOK_RETURN, MPI_COMM_WORLD, &incomingMessage, &Status);
-
-                                print_incoming_data();
-                            } while(!incomingMessage);
-
-                            MPI_Recv(&nFoundbyWorker, sizeof(nFoundbyWorker), MPI_BYTE, Status.MPI_SOURCE, MPI_TAG_TOK_RETURN, MPI_COMM_WORLD, &Status);
-                            nfound += nFoundbyWorker.nfound;
-                            options->not_cmp = options->not_cmp | nFoundbyWorker.not_cmp;
-                            MPI_Send(&args, sizeof(args), MPI_BYTE, Status.MPI_SOURCE, MPI_TAG_ARGS, MPI_COMM_WORLD);
-                        } /* end else-if */
-                        else
-                        {
-                            printf("ERROR: Invalid tag (%d) received \n", Status.MPI_TAG);
-                            MPI_Abort(MPI_COMM_WORLD, 0);
-                            MPI_Finalize();
-                        } /* end else */
-                    } /* end else */
-                } /* end if */
-            } /* end else */
-#endif /* H5_HAVE_PARALLEL */
-            if (obj1_fullpath)
-                HDfree (obj1_fullpath);
-            if (obj2_fullpath)                
-                HDfree (obj2_fullpath);
-        } /* end if */
-    } /* end for */
-    h5diffdebug("done with for loop\n");
-
-#ifdef H5_HAVE_PARALLEL
-    if(g_Parallel)
-    {
-        /* make sure all tasks are done */
-        while(busyTasks > 0)
-        {
-            MPI_Probe(MPI_ANY_SOURCE, MPI_ANY_TAG, MPI_COMM_WORLD, &Status);
-            if(Status.MPI_TAG == MPI_TAG_DONE)
-            {
-                MPI_Recv(&nFoundbyWorker, sizeof(nFoundbyWorker), MPI_BYTE, Status.MPI_SOURCE, MPI_TAG_DONE, MPI_COMM_WORLD, &Status);
-                nfound += nFoundbyWorker.nfound;
-                options->not_cmp = options->not_cmp | nFoundbyWorker.not_cmp;
-                busyTasks--;
-            } /* end if */
-            else if(Status.MPI_TAG == MPI_TAG_TOK_RETURN)
-            {
-                MPI_Recv(&nFoundbyWorker, sizeof(nFoundbyWorker), MPI_BYTE, Status.MPI_SOURCE, MPI_TAG_TOK_RETURN, MPI_COMM_WORLD, &Status);
-                nfound += nFoundbyWorker.nfound;
-                options->not_cmp = options->not_cmp | nFoundbyWorker.not_cmp;
-                busyTasks--;
-                havePrintToken = 1;
-            } /* end else-if */
-            else if(Status.MPI_TAG == MPI_TAG_TOK_REQUEST)
-            {
-                MPI_Recv(NULL, 0, MPI_BYTE, Status.MPI_SOURCE, MPI_TAG_TOK_REQUEST, MPI_COMM_WORLD, &Status);
-                if(havePrintToken)
-                {
-                    int incomingMessage;
-
-                    MPI_Send(NULL, 0, MPI_BYTE, Status.MPI_SOURCE, MPI_TAG_PRINT_TOK, MPI_COMM_WORLD);
-
-                    do {
-                        MPI_Iprobe(MPI_ANY_SOURCE, MPI_TAG_TOK_RETURN, MPI_COMM_WORLD, &incomingMessage, &Status);
-
-                        print_incoming_data();
-                    } while(!incomingMessage);
-
-                    MPI_Recv(&nFoundbyWorker, sizeof(nFoundbyWorker), MPI_BYTE, Status.MPI_SOURCE, MPI_TAG_TOK_RETURN, MPI_COMM_WORLD, &Status);
-                    nfound += nFoundbyWorker.nfound;
-                    options->not_cmp = options->not_cmp | nFoundbyWorker.not_cmp;
-                    busyTasks--;
-                } /* end if */
-                /* someone else must have it...wait for them to return it, then give it to the task that just asked for it. */
-                else
-                {
-                    int source = Status.MPI_SOURCE;
-                    int incomingMessage;
-
-                    do
-                    {
-                        MPI_Iprobe(MPI_ANY_SOURCE, MPI_TAG_TOK_RETURN, MPI_COMM_WORLD, &incomingMessage, &Status);
-
-                        print_incoming_data();
-                    } while(!incomingMessage);
-
-
-                    MPI_Recv(&nFoundbyWorker, sizeof(nFoundbyWorker), MPI_BYTE, MPI_ANY_SOURCE, MPI_TAG_TOK_RETURN, MPI_COMM_WORLD, &Status);
-                    nfound += nFoundbyWorker.nfound;
-                    options->not_cmp = options->not_cmp | nFoundbyWorker.not_cmp;
-                    busyTasks--;
-                    MPI_Send(NULL, 0, MPI_BYTE, source, MPI_TAG_PRINT_TOK, MPI_COMM_WORLD);
-                } /* end else */
-            } /* end else-if */
-            else if(Status.MPI_TAG == MPI_TAG_PRINT_DATA)
-            {
-                char  data[PRINT_DATA_MAX_SIZE + 1];
-                HDmemset(data, 0, PRINT_DATA_MAX_SIZE + 1);
-
-                MPI_Recv(data, PRINT_DATA_MAX_SIZE, MPI_CHAR, Status.MPI_SOURCE, MPI_TAG_PRINT_DATA, MPI_COMM_WORLD, &Status);
-
-                printf("%s", data);
-            } /* end else-if */
-            else
-            {
-                printf("ph5diff-manager: ERROR!! Invalid tag (%d) received \n", Status.MPI_TAG);
-                MPI_Abort(MPI_COMM_WORLD, 0);
-            } /* end else */
-        } /* end while */
-
-        for(i = 1; i < g_nTasks; i++)
-            MPI_Send(NULL, 0, MPI_BYTE, i, MPI_TAG_END, MPI_COMM_WORLD);
-
-        /* Print any final data waiting in our queue */
-        print_incoming_data();
-    } /* end if */
-    h5diffdebug("done with if block\n");
-
-    HDfree(workerTasks);
-    }
-#endif /* H5_HAVE_PARALLEL */
-
-    /* free table */
-    if (table)
-        trav_table_free(table);
-
-    return nfound;
-}
-
-
-/*-------------------------------------------------------------------------
- * Function: diff_compare
- *
- * Purpose: get objects from list, and check for the same type
- *
- * Return: Number of differences found
- *
- * Programmer: Pedro Vicente, pvn@ncsa.uiuc.edu
- * Date: May 9, 2003
- *
- * Programmer: Jonathan Kim
- *  - add following links feature (Feb 11,2010)
- *-------------------------------------------------------------------------
- */
-
-hsize_t diff_compare(hid_t file1_id,
-                     const char *file1_name,
-                     const char *obj1_name,
-                     trav_info_t *info1,
-                     hid_t file2_id,
-                     const char *file2_name,
-                     const char *obj2_name,
-                     trav_info_t *info2,
-                     diff_opt_t *options)
-{
-    int     f1 = 0;
-    int     f2 = 0;
-    hsize_t nfound = 0;
-    ssize_t i,j;
-    int l_ret;
-    int is_dangle_link1 = 0;
-    int is_dangle_link2 = 0;
-    const char *obj1name = obj1_name;
-    const char *obj2name = obj2_name;
-    diff_args_t argdata;
-
-    /* local variables for diff() */
-    h5trav_type_t obj1type, obj2type;
-
-    /* to get link info */
-    h5tool_link_info_t linkinfo1;
-    h5tool_link_info_t linkinfo2;
-
-    /* init link info struct */
-    HDmemset(&linkinfo1, 0, sizeof(h5tool_link_info_t));
-    HDmemset(&linkinfo2, 0, sizeof(h5tool_link_info_t));
-
-    i = h5trav_getindex (info1, obj1name);
-    j = h5trav_getindex (info2, obj2name);
-
-    if (i == -1)
-    {
-        parallel_print ("Object <%s> could not be found in <%s>\n", obj1name,
-            file1_name);
-        f1 = 1;
-    }
-    if (j == -1)
-    {
-        parallel_print ("Object <%s> could not be found in <%s>\n", obj2name,
-            file2_name);
-        f2 = 1;
-    }
-    if (f1 || f2)
-    {
-        options->err_stat = 1;
-        return 0;
-    }
-    /* use the name with the leading "/", as obtained by the iterator function */
-    obj1name = info1->paths[i].path;
-    obj2name = info2->paths[j].path;
-
-    obj1type = info1->paths[i].type;
-    obj2type = info2->paths[j].type;
-
-    /*-----------------------------------------------------------------
-     * follow link option, compare with target object 
-    */
-    if (options->follow_links)
-    {
-        /* pass how to handle printing warnings to the link info option */
-        if(print_warn(options))
-            linkinfo1.opt.msg_mode = linkinfo2.opt.msg_mode = 1;
-
-        /*------------------------------------------------------------
-         * Soft links
-         *------------------------------------------------------------*/
-
-        /*--------------------------
-         * if object1 is a soft link */
-        if (obj1type == H5TRAV_TYPE_LINK)
-        {
-            /* get type of target object */
-            l_ret = H5tools_get_symlink_info(file1_id, obj1name, &linkinfo1, TRUE);
-            /* dangling link */
-            if (l_ret == 0)
-            {
-                if (options->no_dangle_links)
-                {
-                    /* dangling link is an error */
-                    if(options->m_verbose)
-                        parallel_print("Warning: <%s> is a dangling link.\n", obj1name);
-                    options->err_stat = 1;
-                    goto out;
-                }
-                else
-                    is_dangle_link1 = 1;
-            }
-            /* fail */
-            else if(l_ret < 0)
-            {
-                options->err_stat = 1;
-                goto out;
-            }
-            else /* OK */
-            {
-                /* target type for diff() */
-                obj1type = linkinfo1.trg_type;
-            }
-        }
-        
-        /*-----------------------------
-         * if object2 is a soft link */
-        if (obj2type == H5TRAV_TYPE_LINK)
-        {
-            /* get type of target object */
-            l_ret = H5tools_get_symlink_info(file2_id, obj2name, &linkinfo2, TRUE);
-            /* dangling link */
-            if (l_ret == 0)
-            {
-                if (options->no_dangle_links)
-                {
-                    /* dangling link is an error */
-                    if(options->m_verbose)
-                        parallel_print("Warning: <%s> is a dangling link.\n", obj2name);
-                    options->err_stat = 1;
-                    goto out;
-                }
-                else
-                    is_dangle_link2=1;
-            }
-            /* fail */
-            else if(l_ret < 0)
-            {
-                options->err_stat = 1;
-                goto out;
-            }
-            else /* OK */
-            {
-                /* target type for diff() */
-                obj2type = linkinfo2.trg_type;
-            }
-        }
-
-        /*------------------------------------------------------------
-         * External links
-         *------------------------------------------------------------*/
-
-        /*--------------------------------
-         * if object1 is an external link */
-        if (obj1type == H5TRAV_TYPE_UDLINK)
-        {
-            /* get type and name of target object */
-            l_ret = H5tools_get_symlink_info(file1_id, obj1name, &linkinfo1, TRUE);
-            /* dangling link */
-            if (l_ret == 0)
-            {
-                if (options->no_dangle_links)
-                {
-                    /* dangling link is an error */
-                    if(options->m_verbose)
-                        parallel_print("Warning: <%s> is a dangling link.\n", obj1name);
-                    options->err_stat = 1;
-                    goto out;
-                }
-                else
-                    is_dangle_link1 = 1;
-            }
-            /* fail */
-            else if(l_ret < 0)
-            {
-                options->err_stat = 1;
-                goto out;
-            }
-            else /* OK */
-            {
-                /* for external link */
-                if(linkinfo1.linfo.type == H5L_TYPE_EXTERNAL)
-                    obj1type = linkinfo1.trg_type;
-            }
-        }
-
-        /*--------------------------------
-         * if object2 is an external link */
-        if (obj2type == H5TRAV_TYPE_UDLINK)
-        {
-            /* get type and name of target object */
-            l_ret = H5tools_get_symlink_info(file2_id, obj2name, &linkinfo2, TRUE);
-            /* dangling link */
-            if (l_ret == 0)
-            {
-                if (options->no_dangle_links)
-                {
-                    /* dangling link is an error */
-                    if(options->m_verbose)
-                        parallel_print("Warning: <%s> is a dangling link.\n", obj2name);
-                    options->err_stat = 1;
-                    goto out;
-                }
-                else
-                    is_dangle_link2 = 1;
-            }
-            /* fail */
-            else if(l_ret < 0)
-            {
-                options->err_stat = 1;
-                goto out;
-            }
-            else /* OK */
-            {
-                /* for external link */
-                if(linkinfo2.linfo.type == H5L_TYPE_EXTERNAL)
-                    obj2type = linkinfo2.trg_type;
-            }
-        }
-        /* found dangling link */
-        if (is_dangle_link1 || is_dangle_link2)
-            goto out;
-    } /* end of follow_links */
-    
-    /* objects are not the same type */
-    if (obj1type != obj2type)
-    {
-        if (options->m_verbose||options->m_list_not_cmp)
-        {
-            parallel_print("<%s> is of type %s and <%s> is of type %s\n",
-            obj1name, get_type(obj1type), 
-            obj2name, get_type(obj2type));
-        }
-        options->not_cmp=1;
-        goto out;
-    }
-
-    /* Set argdata to pass other args into diff() */
-    argdata.type = obj1type;
-    argdata.is_same_trgobj = 0;
-
-    nfound = diff(file1_id, obj1name,
-                  file2_id, obj2name,
-                  options, &argdata);
-
-out:
-    /*-------------------------------
-     * handle dangling link(s) */
-    /* both obj1 and obj2 are dangling links */
-    if(is_dangle_link1 && is_dangle_link2)
-    {
-        if(print_objname(options, nfound))
-        {
-            do_print_objname("dangling link", obj1name, obj2name, options);
-            print_found(nfound);
-        }
-    }
-    /* obj1 is dangling link */
-    else if (is_dangle_link1)
-    {
-        if(options->m_verbose)
-           parallel_print("obj1 <%s> is a dangling link.\n", obj1name);
-        nfound++;
-        if(print_objname(options, nfound))
-            print_found(nfound);
-    }
-    /* obj2 is dangling link */
-    else if (is_dangle_link2)
-    {
-        if(options->m_verbose)
-            parallel_print("obj2 <%s> is a dangling link.\n", obj2name);
-        nfound++;
-        if(print_objname(options, nfound))
-            print_found(nfound);
-    }
-
-    /* free link info buffer */
-    if (linkinfo1.trg_path)
-        HDfree(linkinfo1.trg_path);
-    if (linkinfo2.trg_path)
-        HDfree(linkinfo2.trg_path);
-
-    return nfound;
-}
-
-
-/*-------------------------------------------------------------------------
- * Function: diff
- *
- * Purpose: switch between types and choose the diff function
- * TYPE is either
- *  H5G_GROUP         Object is a group
- *  H5G_DATASET       Object is a dataset
- *  H5G_TYPE          Object is a named data type
- *  H5G_LINK          Object is a symbolic link
- *
- * Return: Number of differences found
- *
- * Programmer: Jonathan Kim
- *  - add following links feature (Feb 11,2010)
- *  - Change to use diff_args_t to pass the rest of args.
- *    Passing args through the struct instead of individually provides
- *    smoother extensibility through its members, and the MPI code update
- *    for ph5diff needs no interface change.
- *    (May 6,2011)
- *
- * Programmer: Pedro Vicente, pvn@ncsa.uiuc.edu
- * Date: May 9, 2003
- *-------------------------------------------------------------------------
- */
-
-hsize_t diff(hid_t file1_id,
-              const char *path1,
-              hid_t file2_id,
-              const char *path2,
-              diff_opt_t * options,
-              diff_args_t *argdata)
-{
-    hid_t   type1_id = (-1);
-    hid_t   type2_id = (-1);
-    hid_t   grp1_id = (-1);
-    hid_t   grp2_id = (-1);
-    int     ret;
-    int     is_dangle_link1 = 0;
-    int     is_dangle_link2 = 0;
-    int     is_hard_link = 0;
-    hsize_t nfound = 0;
-
-
-    /* to get link info */
-    h5tool_link_info_t linkinfo1;
-    h5tool_link_info_t linkinfo2;
-
-    /* init link info struct */
-    HDmemset(&linkinfo1,0,sizeof(h5tool_link_info_t));
-    HDmemset(&linkinfo2,0,sizeof(h5tool_link_info_t));
-
-    /* pass how to handle printing warnings to linkinfo option */
-    if(print_warn(options))
-        linkinfo1.opt.msg_mode = linkinfo2.opt.msg_mode = 1;
-
-    /* 
-     * Get target object info for obj1 and obj2 and check dangling links.
-     * (For hard-linked objects, diff() only gets obj1's type, so obj2's
-     *  type must be checked here when diff() is called from diff_match()
-     *  for same-named objects with a dangling link on only one side.)
-     */
-
-    /* target object1 - get type and name */
-    ret = H5tools_get_symlink_info(file1_id, path1, &linkinfo1, TRUE);
-    /* dangling link */
-    if (ret == 0)
-    {
-        if (options->no_dangle_links)
-        {
-            /* dangling link is an error */
-            if(options->m_verbose)
-                parallel_print("Warning: <%s> is a dangling link.\n", path1);
-            goto out;
-        }
-        else
-            is_dangle_link1 = 1;
-    }
-    else if (ret < 0)
-        goto out;
-
-    /* target object2 - get type and name */
-    ret = H5tools_get_symlink_info(file2_id, path2, &linkinfo2, TRUE);
-    /* dangling link */
-    if (ret == 0)
-    {
-        if (options->no_dangle_links)
-        {
-            /* dangling link is an error */
-            if(options->m_verbose)
-                parallel_print("Warning: <%s> is a dangling link.\n", path2);
-            goto out;
-        }
-        else
-            is_dangle_link2 = 1;
-    }
-    else if (ret < 0)
-        goto out;
-                
-    /* found dangling link */
-    if (is_dangle_link1 || is_dangle_link2)
-        goto out2;
-  
-    /*
-     * If both paths point to the same target object, skip comparing the
-     * details inside the objects to improve performance.
-     * Hard links are always checked; symbolic links are checked only when
-     * the follow-links option is specified.
-     *
-     * The object names are still printed below so the output matches the
-     * non-bypassed case.
-     */
-     is_hard_link = (argdata->type == H5TRAV_TYPE_DATASET ||
-                     argdata->type == H5TRAV_TYPE_NAMED_DATATYPE ||
-                     argdata->type == H5TRAV_TYPE_GROUP);
-     if (options->follow_links || is_hard_link)
-     {
-        if (argdata->is_same_trgobj)
-        {
-            /* print information only if the verbose or report option is used */
-            if(options->m_verbose || options->m_report)
-            {
-                switch(argdata->type)
-                {
-                case H5TRAV_TYPE_DATASET:
-                    do_print_objname("dataset", path1, path2, options);
-                    break; 
-                case H5TRAV_TYPE_NAMED_DATATYPE:
-                    do_print_objname("datatype", path1, path2, options);
-                    break;
-                case H5TRAV_TYPE_GROUP:
-                    do_print_objname("group", path1, path2, options);
-                    break;
-                case H5TRAV_TYPE_LINK:
-                    do_print_objname("link", path1, path2, options);
-                    break;
-                case H5TRAV_TYPE_UDLINK:
-                    if(linkinfo1.linfo.type == H5L_TYPE_EXTERNAL && linkinfo2.linfo.type == H5L_TYPE_EXTERNAL)
-                        do_print_objname("external link", path1, path2, options);
-                    else
-                        do_print_objname ("user defined link", path1, path2, options);
-                    break; 
-                default:
-                    parallel_print("Comparison not supported: <%s> and <%s> are of type %s\n",
-                        path1, path2, get_type(argdata->type) );
-                    options->not_cmp = 1;
-                    break;
-                } /* switch(type)*/
-
-                print_found(nfound);
-            } /* if(options->m_verbose || options->m_report) */
-
-            goto out2;
-        }
-    }
-
-    switch(argdata->type)
-    {
-       /*----------------------------------------------------------------------
-        * H5TRAV_TYPE_DATASET
-        *----------------------------------------------------------------------
-        */
-        case H5TRAV_TYPE_DATASET:
-            /* verbose (-v) and report (-r) mode */
-            if(options->m_verbose || options->m_report)
-            {
-                do_print_objname("dataset", path1, path2, options);
-                nfound = diff_dataset(file1_id, file2_id, path1, path2, options);
-                print_found(nfound);
-            }
-            /* quiet mode (-q), just count differences */
-            else if(options->m_quiet)
-            {
-                nfound = diff_dataset(file1_id, file2_id, path1, path2, options);
-            }
-            /* the rest (-c, none, ...) */
-            else
-            {
-                nfound = diff_dataset(file1_id, file2_id, path1, path2, options);
-                /* print info if difference found  */
-                if (nfound)
-                {
-                    do_print_objname("dataset", path1, path2, options);
-                    print_found(nfound);	
-                }
-            }
-            break;
-
-       /*----------------------------------------------------------------------
-        * H5TRAV_TYPE_NAMED_DATATYPE
-        *----------------------------------------------------------------------
-        */
-        case H5TRAV_TYPE_NAMED_DATATYPE:
-            if((type1_id = H5Topen2(file1_id, path1, H5P_DEFAULT)) < 0)
-                goto out;
-            if((type2_id = H5Topen2(file2_id, path2, H5P_DEFAULT)) < 0)
-                goto out;
-
-            if((ret = H5Tequal(type1_id, type2_id)) < 0)
-                goto out;
-
-            /* if H5Tequal is > 0 then the datatypes refer to the same datatype */
-            nfound = (ret > 0) ? 0 : 1;
-
-            if(print_objname(options,nfound))
-                do_print_objname("datatype", path1, path2, options);
-
-            /* always print the number of differences found in verbose mode */
-            if(options->m_verbose)
-                print_found(nfound);
-
-            /*-----------------------------------------------------------------
-             * compare attributes
-             * the if condition refers to cases when the dataset is a 
-             * referenced object
-             *-----------------------------------------------------------------
-             */
-            if(path1)
-                nfound += diff_attr(type1_id, type2_id, path1, path2, options);
-
-            if(H5Tclose(type1_id) < 0)
-                goto out;
-            if(H5Tclose(type2_id) < 0)
-                goto out;
-            break;
-
-       /*----------------------------------------------------------------------
-        * H5TRAV_TYPE_GROUP
-        *----------------------------------------------------------------------
-        */
-        case H5TRAV_TYPE_GROUP:
-            if(print_objname(options, nfound))
-                do_print_objname("group", path1, path2, options);
-
-            /* always print the number of differences found in verbose mode */
-            if(options->m_verbose)
-                print_found(nfound);
-
-            if((grp1_id = H5Gopen2(file1_id, path1, H5P_DEFAULT)) < 0)
-                goto out;
-            if((grp2_id = H5Gopen2(file2_id, path2, H5P_DEFAULT)) < 0)
-                goto out;
-
-            /*-----------------------------------------------------------------
-             * compare attributes
-             * the if condition refers to cases when the dataset is a 
-             * referenced object
-             *-----------------------------------------------------------------
-             */
-            if(path1)
-                nfound += diff_attr(grp1_id, grp2_id, path1, path2, options);
-
-            if(H5Gclose(grp1_id) < 0)
-                goto out;
-            if(H5Gclose(grp2_id) < 0)
-                goto out;
-            break;
-
-
-       /*----------------------------------------------------------------------
-        * H5TRAV_TYPE_LINK
-        *----------------------------------------------------------------------
-        */
-        case H5TRAV_TYPE_LINK:
-            {
-            ret = HDstrcmp(linkinfo1.trg_path, linkinfo2.trg_path);
-
-            /* if the target link names are not the same, the links are "different" */
-            nfound = (ret != 0) ? 1 : 0;
-
-            if(print_objname(options, nfound))
-                do_print_objname("link", path1, path2, options);
-
-            if (options->follow_links)
-            {
-                /* objects are not the same type */
-                if (linkinfo1.trg_type != linkinfo2.trg_type)
-                {
-                    if (options->m_verbose||options->m_list_not_cmp)
-                    {
-                        parallel_print("<%s> is of type %s and <%s> is of type %s\n", path1, get_type(linkinfo1.trg_type), path2, get_type(linkinfo2.trg_type));
-                    }
-                    options->not_cmp=1;
-                    goto out;
-                }
-
-                /* Update the type in argdata before passing it into diff().
-                 * For the recursive call, argdata->is_same_trgobj was already
-                 * set by the initial call, so don't reset it here */
-                argdata->type = linkinfo1.trg_type;
-
-                /* call self to compare target object */
-                nfound += diff(file1_id, path1, 
-                               file2_id, path2, 
-                               options, argdata);
-            }
-
-            /* always print the number of differences found in verbose mode */
-            if(options->m_verbose)
-                print_found(nfound);
-
-            }
-            break;
-
-       /*----------------------------------------------------------------------
-        * H5TRAV_TYPE_UDLINK
-        *----------------------------------------------------------------------
-        */
-        case H5TRAV_TYPE_UDLINK:
-            {
-            /* Only external links will have a query function registered */
-            if(linkinfo1.linfo.type == H5L_TYPE_EXTERNAL && linkinfo2.linfo.type == H5L_TYPE_EXTERNAL) 
-            {
-                /* If the buffers are the same size, compare them */
-                if(linkinfo1.linfo.u.val_size == linkinfo2.linfo.u.val_size) 
-                {
-                    ret = HDmemcmp(linkinfo1.trg_path, linkinfo2.trg_path, linkinfo1.linfo.u.val_size);
-                }
-                else
-                    ret = 1;
-
-                /* if "linkinfo1.trg_path" != "linkinfo2.trg_path" then the links
-                 * are "different" extlinkinfo#.path is combination string of 
-                 * file_name and obj_name
-                 */
-                nfound = (ret != 0) ? 1 : 0;
-
-                if(print_objname(options, nfound))
-                    do_print_objname("external link", path1, path2, options);
-
-                if (options->follow_links)
-                {
-                    /* objects are not the same type */
-                    if (linkinfo1.trg_type != linkinfo2.trg_type)
-                    {
-                        if (options->m_verbose||options->m_list_not_cmp)
-                        {
-                            parallel_print("<%s> is of type %s and <%s> is of type %s\n", path1, get_type(linkinfo1.trg_type), path2, get_type(linkinfo2.trg_type));
-                        }
-                        options->not_cmp=1;
-                        goto out;
-                    }
-
-                    /* Update the type in argdata before passing it into diff().
-                     * For the recursive call, argdata->is_same_trgobj was already
-                     * set by the initial call, so don't reset it here */
-                    argdata->type = linkinfo1.trg_type;
-
-                    nfound = diff(file1_id, path1,  
-                                  file2_id, path2, 
-                                  options, argdata);
-                } 
-            } /* end if */
-            else 
-            {
-                /* If one or both of these links isn't an external link, we can only
-                 * compare information from H5Lget_info since we don't have a query
-                 * function registered for them.
-                 *
-                 * If the link classes or the buffer length are not the
-                 * same, the links are "different"
-                 */
-                if((linkinfo1.linfo.type != linkinfo2.linfo.type) || 
-                   (linkinfo1.linfo.u.val_size != linkinfo2.linfo.u.val_size))
-                    nfound = 1;
-                else
-                    nfound = 0;
-
-                if (print_objname (options, nfound))
-                    do_print_objname ("user defined link", path1, path2, options);
-            } /* end else */
-
-            /* always print the number of differences found in verbose mode */
-            if(options->m_verbose)
-                print_found(nfound);
-            }
-            break;
-
-        default:
-            if(options->m_verbose)
-                parallel_print("Comparison not supported: <%s> and <%s> are of type %s\n",
-                    path1, path2, get_type(argdata->type) );
-            options->not_cmp = 1;
-            break;
-     }
-
-    /* free link info buffer */
-    if (linkinfo1.trg_path)
-        HDfree(linkinfo1.trg_path);
-    if (linkinfo2.trg_path)
-        HDfree(linkinfo2.trg_path);
-
-    return nfound;
-
-out:
-    options->err_stat = 1;
-
-out2:
-    /*-----------------------------------
-     * handle dangling link(s) 
-     */
-    /* both path1 and path2 are dangling links */
-    if(is_dangle_link1 && is_dangle_link2)
-    {
-        if(print_objname(options, nfound))
-        {
-            do_print_objname("dangling link", path1, path2, options);
-            print_found(nfound);
-        }
-    }
-    /* path1 is dangling link */
-    else if (is_dangle_link1)
-    {
-        if(options->m_verbose)
-           parallel_print("obj1 <%s> is a dangling link.\n", path1);
-        nfound++;
-        if(print_objname(options, nfound))
-            print_found(nfound);
-    }
-    /* path2 is dangling link */
-    else if (is_dangle_link2)
-    {
-        if(options->m_verbose)
-            parallel_print("obj2 <%s> is a dangling link.\n", path2);
-        nfound++;
-        if(print_objname(options, nfound))
-            print_found(nfound);
-    }
-
-    /* free link info buffer */
-    if (linkinfo1.trg_path)
-        HDfree(linkinfo1.trg_path);
-    if (linkinfo2.trg_path)
-        HDfree(linkinfo2.trg_path);
-
-    /* close */
-    /* disable error reporting */
-    H5E_BEGIN_TRY {
-        H5Tclose(type1_id);
-        H5Tclose(type2_id);
-        H5Gclose(grp1_id);
-        H5Gclose(grp2_id); /* grp2_id is a group id, so close it with H5Gclose */
-        /* enable error reporting */
-    } H5E_END_TRY;
-
-    return nfound;
-}
-
diff --git a/exsrc/src/o.txt b/exsrc/src/o.txt
deleted file mode 100644
index 47eb655e171c1c1368cc0d51ee2533f5d9a3f59e..0000000000000000000000000000000000000000
--- a/exsrc/src/o.txt
+++ /dev/null
@@ -1,3 +0,0 @@
-o
-yes
-
diff --git a/exsrc/src/pbmplus/Makefile.in b/exsrc/src/pbmplus/Makefile.in
deleted file mode 100644
index da35176c818f83b56e9c37c8007d5e251e545ddf..0000000000000000000000000000000000000000
--- a/exsrc/src/pbmplus/Makefile.in
+++ /dev/null
@@ -1,134 +0,0 @@
-# Makefile for pbmplus tools.
-#
-# Copyright (C) 1989, 1991 by Jef Poskanzer.
-#
-# Permission to use, copy, modify, and distribute this software and its
-# documentation for any purpose and without fee is hereby granted, provided
-# that the above copyright notice appear in all copies and that both that
-# copyright notice and this permission notice appear in supporting
-# documentation.  This software is provided "as is" without express or
-# implied warranty.
-
-# CONFIGURE: gcc makes things go faster on some machines, but not everyone
-# has it.  Warning: do not use gcc's -finline-functions or -fstrength-reduce
-# flags, they can produce incorrect code.  (This is with gcc versions 1.35,
-# 1.36, and 1.37, later versions may fix these bugs.)  Also, on some systems
-# gcc can't compile pnmconvol - dunno why.  And on some systems you can't
-# use the -ansi flag, it gives compilation errors in <math.h>.
-CC =		cc
-#CC =		gcc
-#CC =		gcc -fcombine-regs -fpcc-struct-return
-#CC =		gcc -ansi -pedantic -fcombine-regs -fpcc-struct-return
-
-# CONFIGURE: cc flags go here.
-CFLAGS =	-O -w
-#CFLAGS =	-g
-#CFLAGS =	-g -O
-
-# CONFIGURE: ld flags go here.  Eunice users may want to use -noshare so that
-# the binaries can run standalone.
-LDFLAGS =	-s
-#LDFLAGS =	
-#LDFLAGS =	-noshare
-
-# CONFIGURE: If you have an X11-style rgb color names file, define its
-# path here.  This is used by PPM to parse color names into rgb values.
-# If you don't have such a file, comment this out and use the alternative
-# hex and decimal forms to specify colors (see ppm/pgmtoppm.1 for details).
-RGBDEF =	-DRGB_DB=\"/usr/lib/X11/rgb\"
-
-# CONFIGURE: PBMPLUS's support for TIFF files depends on the library from
-# Sam Leffler's TIFF Software package - see the OTHER.SYSTEMS file for a
-# full description and access information.  To configure PBMPLUS to use the
-# library: first, if necessary, fetch the TIFF Software, unpack it in a
-# scratch directory somewhere, and move the libtiff subdirectory right here
-# into the PBMPLUS top-level directory.  Configure and "make" in the
-# libtiff directory.  Yes, you do have to do the TIFF make by hand, the
-# general PBMPLUS make will *not* make libtiff.  Finally, uncomment the
-# following five definitions.
-#
-# Libtiff is pretty good about portability, but there are some machines
-# it has problems on.  If you run into problems, you may wish to contact
-# Sam directly, at the address listed in the OTHER.SYSTEMS file.
-#
-# By the way, you must have at least version 2.4 of libtiff.  Earlier
-# versions will not work.
-TIFFDEF =	-DLIBTIFF
-TIFFINC =	-I@EXTERNALS@/include
-TIFFLIB =	@EXTERNALS@/lib/libtiff.a
-#TIFFBINARIES =	tifftopnm pnmtotiff
-#TIFFOBJECTS =	tifftopnm.o pnmtotiff.o
-
-# CONFIGURE: Define the directory that you want the binaries copied to.
-# If you need scripts and binaries to be in different directories, you
-# can set that up too.
-INSTALLBINARIES =	@EXTERNALS@/bin
-INSTALLSCRIPTS =	$(INSTALLBINARIES)
-
-# CONFIGURE: Define the directories that you want the manual sources copied to,
-# plus the suffix you want them to have.
-INSTALLMANUALS1 =	@EXTERNALS@/man/mann
-SUFFIXMANUALS1 =	n
-INSTALLMANUALS3 =	@EXTERNALS@/man/mann
-SUFFIXMANUALS3 =	n
-INSTALLMANUALS5 =	@EXTERNALS@/man/mann
-SUFFIXMANUALS5 =	n
-
-# CONFIGURE: Normally the man pages are installed using "cp".  By changing
-# this define you can use something else, for example a script that calls
-# compress or pack.
-MANCP =			cp
-
-# CONFIGURE: Normally the Makefiles build and install separate binaries for
-# each program.  However, on some systems (especially those without shared
-# libraries) this can mean a lot of space.  In this case you might try
-# building a "merge" instead.  The idea here is to link all the binaries
-# together into one huge executable, with a tiny dispatch program as the
-# main.  Then the merged binary is installed with file-system links for
-# each program it includes.  The dispatch routine can tell which program
-# to run by looking at argv[0].  On a Sun3 under SunOS 3.5 the space for
-# executables went from 2.9 meg to .36 meg.
-#
-# Note that if you make a "merge", the executables don't get created
-# until you do the install.
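-#
-# Illustrative sketch (not part of this package): the tiny dispatch main of a
-# merged binary can key off argv[0] roughly like this, with each tool's main
-# renamed to <tool>_main at compile time (see the "-Dmain=$*_main" rule in the
-# pnm Makefile):
-#
-#     int main( argc, argv )
-#         int argc;
-#         char* argv[];
-#         {
-#         char* cp = rindex( argv[0], '/' );
-#         cp = ( cp == 0 ) ? argv[0] : cp + 1;
-#         if ( strcmp( cp, "pnmcat" ) == 0 )
-#             return pnmcat_main( argc, argv );
-#         return 1;
-#         }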
-all:		binaries
-install:	install.bin install.man
-#all:		merge
-#install:	install.merge install.man
-
-# End of configurable definitions.
-
-SHELL =		/bin/sh
-MAKE =		make
-SUBDIRS =	pbm pgm ppm pnm
-
-binaries:
-	for i in $(SUBDIRS) ; do \
-	    ( echo $$i ; cd $$i ; $(MAKE) $(MFLAGS) 'CC=$(CC)' 'CFLAGS=$(CFLAGS)' 'RGBDEF=$(RGBDEF)' 'TIFFDEF=$(TIFFDEF)' 'TIFFINC=$(TIFFINC)' 'TIFFLIB=$(TIFFLIB)' 'TIFFBINARIES=$(TIFFBINARIES)' 'TIFFOBJECTS=$(TIFFOBJECTS)' 'LDFLAGS=$(LDFLAGS)' binaries ); \
-	done
-
-merge:
-	for i in $(SUBDIRS) ; do \
-	    ( echo $$i ; cd $$i ; $(MAKE) $(MFLAGS) 'CC=$(CC)' 'CFLAGS=$(CFLAGS)' 'RGBDEF=$(RGBDEF)' 'TIFFDEF=$(TIFFDEF)' 'TIFFINC=$(TIFFINC)' 'TIFFLIB=$(TIFFLIB)' 'TIFFBINARIES=$(TIFFBINARIES)' 'TIFFOBJECTS=$(TIFFOBJECTS)' 'LDFLAGS=$(LDFLAGS)' merge ); \
-	done
-
-install.bin:
-	for i in $(SUBDIRS) ; do \
-	    ( echo $$i ; cd $$i ; $(MAKE) $(MFLAGS) 'CC=$(CC)' 'CFLAGS=$(CFLAGS)' 'RGBDEF=$(RGBDEF)' 'TIFFDEF=$(TIFFDEF)' 'TIFFINC=$(TIFFINC)' 'TIFFLIB=$(TIFFLIB)' 'TIFFBINARIES=$(TIFFBINARIES)' 'TIFFOBJECTS=$(TIFFOBJECTS)' 'LDFLAGS=$(LDFLAGS)' 'INSTALLBINARIES=$(INSTALLBINARIES)' 'INSTALLSCRIPTS=$(INSTALLSCRIPTS)' install.bin ); \
-	done
-
-install.merge:
-	for i in $(SUBDIRS) ; do \
-	    ( echo $$i ; cd $$i ; $(MAKE) $(MFLAGS) 'CC=$(CC)' 'CFLAGS=$(CFLAGS)' 'RGBDEF=$(RGBDEF)' 'TIFFDEF=$(TIFFDEF)' 'TIFFINC=$(TIFFINC)' 'TIFFLIB=$(TIFFLIB)' 'TIFFBINARIES=$(TIFFBINARIES)' 'TIFFOBJECTS=$(TIFFOBJECTS)' 'LDFLAGS=$(LDFLAGS)' 'INSTALLBINARIES=$(INSTALLBINARIES)' 'INSTALLSCRIPTS=$(INSTALLSCRIPTS)' install.merge ); \
-	done
-
-install.man:
-	for i in $(SUBDIRS) ; do \
-	    ( echo $$i ; cd $$i ; $(MAKE) $(MFLAGS) 'TIFFBINARIES=$(TIFFBINARIES)' 'INSTALLMANUALS1=$(INSTALLMANUALS1)' 'SUFFIXMANUALS1=$(SUFFIXMANUALS1)' 'INSTALLMANUALS3=$(INSTALLMANUALS3)' 'SUFFIXMANUALS3=$(SUFFIXMANUALS3)' 'INSTALLMANUALS5=$(INSTALLMANUALS5)' 'SUFFIXMANUALS5=$(SUFFIXMANUALS5)' 'MANCP=$(MANCP)' install.man ); \
-	done
-
-clean:
-	-rm -f *.shar *.shar? art.*
-	for i in $(SUBDIRS) ; do \
-	    ( echo $$i ; cd $$i ; $(MAKE) $(MFLAGS) clean ); \
-	done
diff --git a/exsrc/src/pbmplus/libpbm1.c b/exsrc/src/pbmplus/libpbm1.c
deleted file mode 100644
index 00f3e31b2e818f8a45ecf98af56797cbd0376351..0000000000000000000000000000000000000000
--- a/exsrc/src/pbmplus/libpbm1.c
+++ /dev/null
@@ -1,674 +0,0 @@
-/* libpbm1.c - pbm utility library part 1
-**
-** Copyright (C) 1988 by Jef Poskanzer.
-**
-** Permission to use, copy, modify, and distribute this software and its
-** documentation for any purpose and without fee is hereby granted, provided
-** that the above copyright notice appear in all copies and that both that
-** copyright notice and this permission notice appear in supporting
-** documentation.  This software is provided "as is" without express or
-** implied warranty.
-*/
-
-#include "pbm.h"
-#include "version.h"
-#include "libpbm.h"
-#if __STDC__
-#include <stdarg.h>
-#else /*__STDC__*/
-#include <varargs.h>
-#endif /*__STDC__*/
-
-
-/* Forward routines. */
-
-#if defined(NEED_VFPRINTF1) || defined(NEED_VFPRINTF2)
-int vfprintf ARGS(( FILE* stream, char* format, va_list args ));
-#endif /*NEED_VFPRINTF*/
-
-
-/* Variable-sized arrays. */
-
-char*
-pm_allocrow( cols, size )
-    int cols;
-    int size;
-    {
-    register char* itrow;
-
-    itrow = (char*) malloc( cols * size );
-    if ( itrow == (char*) 0 )
-	pm_error( "out of memory allocating a row" );
-    return itrow;
-    }
-
-void
-pm_freerow( itrow )
-    char* itrow;
-    {
-    free( itrow );
-    }
-
-
-char**
-pm_allocarray( cols, rows, size )
-    int cols, rows;
-    int size;
-    {
-    char** its;
-    int i;
-
-    its = (char**) malloc( rows * sizeof(char*) );
-    if ( its == (char**) 0 )
-	pm_error( "out of memory allocating an array" );
-    its[0] = (char*) malloc( rows * cols * size );
-    if ( its[0] == (char*) 0 )
-	pm_error( "out of memory allocating an array" );
-    for ( i = 1; i < rows; ++i )
-	its[i] = &(its[0][i * cols * size]);
-    return its;
-    }
-
-void
-pm_freearray( its, rows )
-    char** its;
-    int rows;
-    {
-    free( its[0] );
-    free( its );
-    }
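-
-/* Usage sketch (illustrative, not from the original source): the row
- * pointers all point into one contiguous block, so its[row][col] indexing
- * works and pm_freearray() needs only two free() calls.
- *
- *     char** grid = pm_allocarray( 640, 480, sizeof(char) );
- *     grid[479][639] = 1;
- *     pm_freearray( grid, 480 );
- */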
-
-
-/* Case-insensitive keyword matcher. */
-
-int
-pm_keymatch( str, keyword, minchars )
-    char* str;
-    char* keyword;
-    int minchars;
-    {
-    register int len;
-
-    len = strlen( str );
-    if ( len < minchars )
-	return 0;
-    while ( --len >= 0 )
-	{
-	register char c1, c2;
-
-	c1 = *str++;
-	c2 = *keyword++;
-	if ( c2 == '\0' )
-	    return 0;
-	if ( isupper( c1 ) )
-	    c1 = tolower( c1 );
-	if ( isupper( c2 ) )
-	    c2 = tolower( c2 );
-	if ( c1 != c2 )
-	    return 0;
-	}
-    return 1;
-    }
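-
-/* Usage sketch (illustrative): pm_keymatch() succeeds when str is a
- * case-insensitive prefix of keyword and is at least minchars long, e.g.
- * pm_keymatch( "-Vers", "-version", 5 ) returns 1, while
- * pm_keymatch( "-v", "-version", 5 ) returns 0 because str is too short.
- */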
-
-
-/* Log base two hacks. */
-
-int
-pm_maxvaltobits( maxval )
-    int maxval;
-    {
-    if ( maxval <= 1 )
-	return 1;
-    else if ( maxval <= 3 )
-	return 2;
-    else if ( maxval <= 7 )
-	return 3;
-    else if ( maxval <= 15 )
-	return 4;
-    else if ( maxval <= 31 )
-	return 5;
-    else if ( maxval <= 63 )
-	return 6;
-    else if ( maxval <= 127 )
-	return 7;
-    else if ( maxval <= 255 )
-	return 8;
-    else if ( maxval <= 511 )
-	return 9;
-    else if ( maxval <= 1023 )
-	return 10;
-    else if ( maxval <= 2047 )
-	return 11;
-    else if ( maxval <= 4095 )
-	return 12;
-    else if ( maxval <= 8191 )
-	return 13;
-    else if ( maxval <= 16383 )
-	return 14;
-    else if ( maxval <= 32767 )
-	return 15;
-    else if ( (long) maxval <= 65535L )
-	return 16;
-    else
-	pm_error( "maxval of %d is too large!", maxval );
-    }
-
-int
-pm_bitstomaxval( bits )
-    int bits;
-    {
-    return ( 1 << bits ) - 1;
-    }
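-
-/* Worked example (illustrative): pm_maxvaltobits( 255 ) returns 8 because
- * 255 fits in 8 bits, and pm_bitstomaxval( 8 ) returns (1 << 8) - 1 = 255,
- * so the two routines invert each other for maxvals of the form 2^n - 1.
- */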
-
-
-/* Initialization. */
-
-static char* progname;
-static int showmessages;
-
-void
-pm_init( argcP, argv )
-    int* argcP;
-    char* argv[];
-    {
-    int argn, i;
-
-    /* Extract program name. */
-    progname = rindex( argv[0], '/');
-    if ( progname == NULL )
-	progname = argv[0];
-    else
-	++progname;
-
-    /* Check for any global args. */
-    showmessages = 1;
-    for ( argn = 1; argn < *argcP; ++argn )
-	{
-	if ( pm_keymatch( argv[argn], "-quiet", 6 ) )
-	    {
-	    showmessages = 0;
-	    }
-	else if ( pm_keymatch( argv[argn], "-version", 7 ) )
-	    {
-	    pm_message( "Version of %s", PBMPLUS_VERSION );
-#ifdef BSD
-	    pm_message( "BSD defined" );
-#endif /*BSD*/
-#ifdef SYSV
-	    pm_message( "SYSV defined" );
-#endif /*SYSV*/
-#ifdef MSDOS
-	    pm_message( "MSDOS defined" );
-#endif /*MSDOS*/
-#ifdef PBMPLUS_RAWBITS
-	    pm_message( "PBMPLUS_RAWBITS defined" );
-#endif /*PBMPLUS_RAWBITS*/
-#ifdef PBMPLUS_BROKENPUTC1
-	    pm_message( "PBMPLUS_BROKENPUTC1 defined" );
-#endif /*PBMPLUS_BROKENPUTC1*/
-#ifdef PBMPLUS_BROKENPUTC2
-	    pm_message( "PBMPLUS_BROKENPUTC2 defined" );
-#endif /*PBMPLUS_BROKENPUTC2*/
-#ifdef PGM_BIGGRAYS
-	    pm_message( "PGM_BIGGRAYS defined" );
-#endif /*PGM_BIGGRAYS*/
-#ifdef PPM_PACKCOLORS
-	    pm_message( "PPM_PACKCOLORS defined" );
-#endif /*PPM_PACKCOLORS*/
-#ifdef DEBUG
-	    pm_message( "DEBUG defined" );
-#endif /*DEBUG*/
-#ifdef NEED_VFPRINTF1
-	    pm_message( "NEED_VFPRINTF1 defined" );
-#endif /*NEED_VFPRINTF1*/
-#ifdef NEED_VFPRINTF2
-	    pm_message( "NEED_VFPRINTF2 defined" );
-#endif /*NEED_VFPRINTF2*/
-#ifdef RGB_DB
-	    pm_message( "RGB_DB=\"%s\"", RGB_DB );
-#endif /*RGB_DB*/
-#ifdef LIBTIFF
-	    pm_message( "LIBTIFF defined" );
-#endif /*LIBTIFF*/
-	    exit( 0 );
-	    }
-	else
-	    continue;
-	for ( i = argn + 1; i <= *argcP; ++i )
-	    argv[i - 1] = argv[i];
-	--(*argcP);
-	}
-    }
-
-void
-pbm_init( argcP, argv )
-    int* argcP;
-    char* argv[];
-    {
-    pm_init( argcP, argv );
-    }
-
-
-/* Error handling. */
-
-void
-pm_usage( usage )
-    char* usage;
-    {
-    fprintf( stderr, "usage:  %s %s\n", progname, usage );
-    exit( 1 );
-    }
-
-void
-pm_perror( reason )
-    char* reason;
-    {
-    extern int errno;
-    char* e;
-
-    e = sys_errlist[errno];
-
-    if ( reason != 0 && reason[0] != '\0' )
-	pm_error( "%s - %s", reason, e );
-    else
-	pm_error( "%s", e );
-    }
-
-#if __STDC__
-void
-pm_message( char* format, ... )
-    {
-    va_list args;
-
-    va_start( args, format );
-#else /*__STDC__*/
-/*VARARGS1*/
-void
-pm_message( va_alist )
-    va_dcl
-    { /*}*/
-    va_list args;
-    char* format;
-
-    va_start( args );
-    format = va_arg( args, char* );
-#endif /*__STDC__*/
-
-    if ( showmessages )
-	{
-	fprintf( stderr, "%s: ", progname );
-	(void) vfprintf( stderr, format, args );
-	fputc( '\n', stderr );
-	}
-    va_end( args );
-    }
-
-#if __STDC__
-void
-pm_error( char* format, ... )
-    {
-    va_list args;
-
-    va_start( args, format );
-#else /*__STDC__*/
-/*VARARGS1*/
-void
-pm_error( va_alist )
-    va_dcl
-    { /*}*/
-    va_list args;
-    char* format;
-
-    va_start( args );
-    format = va_arg( args, char* );
-#endif /*__STDC__*/
-
-    fprintf( stderr, "%s: ", progname );
-    (void) vfprintf( stderr, format, args );
-    fputc( '\n', stderr );
-    va_end( args );
-    exit( 1 );
-    }
-
-#ifdef NEED_VFPRINTF1
-
-/* Micro-vfprintf, for systems that don't have vfprintf but do have _doprnt.
-*/
-
-int
-vfprintf( stream, format, args )
-    FILE* stream;
-    char* format;
-    va_list args;
-    {
-    return _doprnt( format, args, stream );
-    }
-#endif /*NEED_VFPRINTF1*/
-
-#ifdef NEED_VFPRINTF2
-
-/* Portable mini-vfprintf, for systems that don't have either vfprintf or
-** _doprnt.  This depends only on fprintf.  If you don't have fprintf,
-** you might consider getting a new stdio library.
-*/
-
-int
-vfprintf( stream, format, args )
-    FILE* stream;
-    char* format;
-    va_list args;
-    {
-    int n;
-    char* ep;
-    char fchar;
-    char tformat[512];
-    int do_long;
-    int i;
-    long l;
-    unsigned u;
-    unsigned long ul;
-    char* s;
-    double d;
-
-    n = 0;
-    while ( *format != '\0' )
-	{
-	if ( *format != '%' )
-	    { /* Not special, just write out the char. */
-	    (void) putc( *format, stream );
-	    ++n;
-	    ++format;
-	    }
-	else
-	    {
-	    do_long = 0;
-	    ep = format + 1;
-
-	    /* Skip over all the field width and precision junk. */
-	    if ( *ep == '-' )
-		++ep;
-	    if ( *ep == '0' )
-		++ep;
-	    while ( isdigit( *ep ) )
-		++ep;
-	    if ( *ep == '.' )
-		{
-		++ep;
-		while ( isdigit( *ep ) )
-		    ++ep;
-		}
-	    if ( *ep == '#' )
-		++ep;
-	    if ( *ep == 'l' )
-		{
-		do_long = 1;
-		++ep;
-		}
-
-	    /* Here's the field type.  Extract it, and copy this format
-	    ** specifier to a temp string so we can add an end-of-string.
-	    */
-	    fchar = *ep;
-	    (void) strncpy( tformat, format, ep - format + 1 );
-	    tformat[ep - format + 1] = '\0';
-
-	    /* Now do a one-argument fprintf with the format string we have
-	    ** isolated.
-	    */
-	    switch ( fchar )
-		{
-		case 'd':
-		if ( do_long )
-		    {
-		    l = va_arg( args, long );
-		    n += fprintf( stream, tformat, l );
-		    }
-		else
-		    {
-		    i = va_arg( args, int );
-		    n += fprintf( stream, tformat, i );
-		    }
-		break;
-
-	        case 'o':
-	        case 'x':
-	        case 'X':
-	        case 'u':
-		if ( do_long )
-		    {
-		    ul = va_arg( args, unsigned long );
-		    n += fprintf( stream, tformat, ul );
-		    }
-		else
-		    {
-		    u = va_arg( args, unsigned );
-		    n += fprintf( stream, tformat, u );
-		    }
-		break;
-
-	        case 'c':
-		i = (char) va_arg( args, int );
-		n += fprintf( stream, tformat, i );
-		break;
-
-	        case 's':
-		s = va_arg( args, char* );
-		n += fprintf( stream, tformat, s );
-		break;
-
-	        case 'e':
-	        case 'E':
-	        case 'f':
-	        case 'g':
-	        case 'G':
-		d = va_arg( args, double );
-		n += fprintf( stream, tformat, d );
-		break;
-
-	        case '%':
-		(void) putc( '%', stream );
-		++n;
-		break;
-
-		default:
-		return -1;
-		}
-
-	    /* Resume formatting on the next character. */
-	    format = ep + 1;
-	    }
-	}
-    return n;
-    }
-#endif /*NEED_VFPRINTF2*/
-
-
-/* File open/close that handles "-" as stdin and checks errors. */
-
-FILE*
-pm_openr( name )
-    char* name;
-    {
-    FILE* f;
-
-    if ( strcmp( name, "-" ) == 0 )
-	f = stdin;
-    else
-	{
-#ifdef MSDOS
-	f = fopen( name, "rb" );
-#else /*MSDOS*/
-	f = fopen( name, "r" );
-#endif /*MSDOS*/
-	if ( f == NULL )
-	    {
-	    pm_perror( name );
-	    exit( 1 );
-	    }
-	}
-    return f;
-    }
-
-FILE*
-pm_openw( name )
-    char* name;
-    {
-    FILE* f;
-
-#ifdef MSDOS
-    f = fopen( name, "wb" );
-#else /*MSDOS*/
-    f = fopen( name, "w" );
-#endif /*MSDOS*/
-    if ( f == NULL )
-	{
-	pm_perror( name );
-	exit( 1 );
-	}
-    return f;
-    }
-
-void
-pm_close( f )
-    FILE* f;
-    {
-    fflush( f );
-    if ( ferror( f ) )
-	pm_message( "a file read or write error occurred at some point" );
-    if ( f != stdin )
-	if ( fclose( f ) != 0 )
-	    pm_perror( "fclose" );
-    }
-
-/* Endian I/O.
-*/
-
-int
-pm_readbigshort( in, sP )
-    FILE* in;
-    short* sP;
-    {
-    int c;
-
-    if ( (c = getc( in )) == EOF )
-	return -1;
-    *sP = ( c & 0xff ) << 8;
-    if ( (c = getc( in )) == EOF )
-	return -1;
-    *sP |= c & 0xff;
-    return 0;
-    }
-
-#if __STDC__
-int
-pm_writebigshort( FILE* out, short s )
-#else /*__STDC__*/
-int
-pm_writebigshort( out, s )
-    FILE* out;
-    short s;
-#endif /*__STDC__*/
-    {
-    (void) putc( ( s >> 8 ) & 0xff, out );
-    (void) putc( s & 0xff, out );
-    return 0;
-    }
-
-int
-pm_readbiglong( in, lP )
-    FILE* in;
-    long* lP;
-    {
-    int c;
-
-    if ( (c = getc( in )) == EOF )
-	return -1;
-    *lP = ( c & 0xff ) << 24;
-    if ( (c = getc( in )) == EOF )
-	return -1;
-    *lP |= ( c & 0xff ) << 16;
-    if ( (c = getc( in )) == EOF )
-	return -1;
-    *lP |= ( c & 0xff ) << 8;
-    if ( (c = getc( in )) == EOF )
-	return -1;
-    *lP |= c & 0xff;
-    return 0;
-    }
-
-int
-pm_writebiglong( out, l )
-    FILE* out;
-    long l;
-    {
-    (void) putc( ( l >> 24 ) & 0xff, out );
-    (void) putc( ( l >> 16 ) & 0xff, out );
-    (void) putc( ( l >> 8 ) & 0xff, out );
-    (void) putc( l & 0xff, out );
-    return 0;
-    }
-
-int
-pm_readlittleshort( in, sP )
-    FILE* in;
-    short* sP;
-    {
-    int c;
-
-    if ( (c = getc( in )) == EOF )
-	return -1;
-    *sP = c & 0xff;
-    if ( (c = getc( in )) == EOF )
-	return -1;
-    *sP |= ( c & 0xff ) << 8;
-    return 0;
-    }
-
-#if __STDC__
-int
-pm_writelittleshort( FILE* out, short s )
-#else /*__STDC__*/
-int
-pm_writelittleshort( out, s )
-    FILE* out;
-    short s;
-#endif /*__STDC__*/
-    {
-    (void) putc( s & 0xff, out );
-    (void) putc( ( s >> 8 ) & 0xff, out );
-    return 0;
-    }
-
-int
-pm_readlittlelong( in, lP )
-    FILE* in;
-    long* lP;
-    {
-    int c;
-
-    if ( (c = getc( in )) == EOF )
-	return -1;
-    *lP = c & 0xff;
-    if ( (c = getc( in )) == EOF )
-	return -1;
-    *lP |= ( c & 0xff ) << 8;
-    if ( (c = getc( in )) == EOF )
-	return -1;
-    *lP |= ( c & 0xff ) << 16;
-    if ( (c = getc( in )) == EOF )
-	return -1;
-    *lP |= ( c & 0xff ) << 24;
-    return 0;
-    }
-
-int
-pm_writelittlelong( out, l )
-    FILE* out;
-    long l;
-    {
-    (void) putc( l & 0xff, out );
-    (void) putc( ( l >> 8 ) & 0xff, out );
-    (void) putc( ( l >> 16 ) & 0xff, out );
-    (void) putc( ( l >> 24 ) & 0xff, out );
-    return 0;
-    }
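-
-/* Round-trip sketch (illustrative, not part of the original source):
- *
- *     FILE* f = pm_openw( "out.bin" );
- *     (void) pm_writebigshort( f, 513 );    writes bytes 0x02 0x01
- *     pm_close( f );
- *     short s;
- *     f = pm_openr( "out.bin" );
- *     (void) pm_readbigshort( f, &s );      s is 513 again
- *     pm_close( f );
- */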
diff --git a/exsrc/src/pbmplus/pbmplus.h b/exsrc/src/pbmplus/pbmplus.h
deleted file mode 100644
index 7f868c83c86b9df002a4cdf1c0e2f3d3c4835e81..0000000000000000000000000000000000000000
--- a/exsrc/src/pbmplus/pbmplus.h
+++ /dev/null
@@ -1,192 +0,0 @@
-/* pbmplus.h - header file for PBM, PGM, PPM, and PNM
-**
-** Copyright (C) 1988, 1989, 1991 by Jef Poskanzer.
-**
-** Permission to use, copy, modify, and distribute this software and its
-** documentation for any purpose and without fee is hereby granted, provided
-** that the above copyright notice appear in all copies and that both that
-** copyright notice and this permission notice appear in supporting
-** documentation.  This software is provided "as is" without express or
-** implied warranty.
-*/
-
-#ifndef _PBMPLUS_H_
-#define _PBMPLUS_H_
-
-#include <sys/types.h>
-#include <ctype.h>
-#include <stdio.h>
-
-#if defined(USG) || defined(SVR4)
-#define SYSV
-#endif
-#if ! ( defined(BSD) || defined(SYSV) || defined(MSDOS) )
-/* CONFIGURE: If your system is >= 4.2BSD, set the BSD option; if you're a
-** System V site, set the SYSV option; and if you're IBM-compatible, set
-** MSDOS.  If your compiler is ANSI C, you're probably better off setting
-** SYSV - all it affects is string handling.
-*/
-#define BSD
-/* #define SYSV */
-/* #define MSDOS */
-#endif
-
-/* CONFIGURE: If you want to enable writing "raw" files, set this option.
-** "Raw" files are smaller, and much faster to read and write, but you
-** must have a filesystem that allows all 256 ASCII characters to be read
-** and written.  You will no longer be able to mail P?M files without 
-** using uuencode or the equivalent, or running the files through pnmnoraw.
-** Note that reading "raw" files works whether writing is enabled or not.
-*/
-#define PBMPLUS_RAWBITS
-
-/* CONFIGURE: PGM can store gray values as either bytes or shorts.  For most
-** applications, bytes will be big enough, and the memory savings can be
-** substantial.  However, if you need more than 8 bits of grayscale resolution,
-** then define this symbol.
-*/
-/* #define PGM_BIGGRAYS */
-
-/* CONFIGURE: Normally, PPM handles a pixel as a struct of three grays.
-** If grays are stored in bytes, that's 24 bits per color pixel; if
-** grays are stored as shorts, that's 48 bits per color pixel.  PPM
-** can also be configured to pack the three grays into a single longword,
-** 10 bits each, 30 bits per pixel.
-**
-** If you have configured PGM with the PGM_BIGGRAYS option, AND you don't
-** need more than 10 bits for each color component, AND you care more about
-** memory use than speed, then this option might be a win.  Under these
-** circumstances it will make some of the programs use 1.5 times less space,
-** but all of the programs will run about 1.4 times slower.
-**
-** If you are not using PGM_BIGGRAYS, then this option is useless -- it
-** doesn't save any space, but it still slows things down.
-*/
-/* #define PPM_PACKCOLORS */
-
-/* CONFIGURE: uncomment this to enable debugging checks. */
-/* #define DEBUG */
-
-#ifdef SYSV
-
-#include <string.h>
-#define index(s,c) strchr(s,c)
-#define rindex(s,c) strrchr(s,c)
-#define srandom(s) srand(s)
-#define random rand
-#define bzero(dst,len) memset(dst,0,len)
-#define bcopy(src,dst,len) memcpy(dst,src,len)
-#define bcmp memcmp
-extern void srand();
-extern int rand();
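-
-/* Illustrative note: with SYSV defined, a call such as
- *     progname = rindex( argv[0], '/' );
- * in libpbm1.c compiles as strrchr( argv[0], '/' ) via the macro above. */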
-
-#else /*SYSV*/
-
-#include <strings.h>
-extern void srandom();
-extern long random();
-
-#endif /*SYSV*/
-
-extern int atoi();
-extern void exit();
-extern long time();
-extern int write();
-
-/* CONFIGURE: On some systems, malloc.h doesn't declare these, so we have
-** to do it.  On other systems, for example HP/UX, it declares them
-** incompatibly.  And some systems, for example Dynix, don't have a
-** malloc.h at all.  A sad situation.  If you have compilation problems
-** that point here, feel free to tweak or remove these declarations.
-*/
-#include <sys/malloc.h>
-//extern char* malloc();
-//extern char* realloc();
-//extern char* calloc();
-
-/* CONFIGURE: Some systems don't have vfprintf(), which we need for the
-** error-reporting routines.  If you compile and get a link error about
-** this routine, uncomment the first define, which gives you a vfprintf
-** that uses the theoretically non-portable but fairly common routine
-** _doprnt().  If you then get a link error about _doprnt, or
-** message-printing doesn't look like it's working, try the second
-** define instead.
-*/
-/* #define NEED_VFPRINTF1 */
-/* #define NEED_VFPRINTF2 */
-
-/* End of configurable definitions. */
-
-
-#undef max
-#define max(a,b) ((a) > (b) ? (a) : (b))
-#undef min
-#define min(a,b) ((a) < (b) ? (a) : (b))
-#undef abs
-#define abs(a) ((a) >= 0 ? (a) : -(a))
-#undef odd
-#define odd(n) ((n) & 1)
-
-
-/* Definitions to make PBMPLUS work with either ANSI C or C Classic. */
-
-#if __STDC__
-#define ARGS(alist) alist
-#else /*__STDC__*/
-#define ARGS(alist) ()
-#define const
-#endif /*__STDC__*/
-
-
-/* Initialization. */
-
-void pm_init ARGS(( int* argcP, char* argv[] ));
-
-
-/* Variable-sized arrays definitions. */
-
-char** pm_allocarray ARGS(( int cols, int rows, int size ));
-char* pm_allocrow ARGS(( int cols, int size ));
-void pm_freearray ARGS(( char** its, int rows ));
-void pm_freerow ARGS(( char* itrow ));
-
-
-/* Case-insensitive keyword matcher. */
-
-int pm_keymatch ARGS(( char* str, char* keyword, int minchars ));
-
-
-/* Log base two hacks. */
-
-int pm_maxvaltobits ARGS(( int maxval ));
-int pm_bitstomaxval ARGS(( int bits ));
-
-
-/* Error handling definitions. */
-
-void pm_message ARGS(( char*, ... ));
-void pm_error ARGS(( char*, ... ));			/* doesn't return */
-void pm_perror ARGS(( char* reason ));			/* doesn't return */
-void pm_usage ARGS(( char* usage ));			/* doesn't return */
-
-
-/* File open/close that handles "-" as stdin and checks errors. */
-
-FILE* pm_openr ARGS(( char* name ));
-FILE* pm_openw ARGS(( char* name ));
-void pm_close ARGS(( FILE* f ));
-
-
-/* Endian I/O. */
-
-int pm_readbigshort ARGS(( FILE* in, short* sP ));
-int pm_writebigshort ARGS(( FILE* out, short s ));
-int pm_readbiglong ARGS(( FILE* in, long* lP ));
-int pm_writebiglong ARGS(( FILE* out, long l ));
-int pm_readlittleshort ARGS(( FILE* in, short* sP ));
-int pm_writelittleshort ARGS(( FILE* out, short s ));
-int pm_readlittlelong ARGS(( FILE* in, long* lP ));
-int pm_writelittlelong ARGS(( FILE* out, long l ));
-
-
-#endif /*_PBMPLUS_H_*/
diff --git a/exsrc/src/pbmplus/pnm/Makefile.in b/exsrc/src/pbmplus/pnm/Makefile.in
deleted file mode 100644
index e14ff6d7b9eec3621fddc1f32353104041c8c176..0000000000000000000000000000000000000000
--- a/exsrc/src/pbmplus/pnm/Makefile.in
+++ /dev/null
@@ -1,188 +0,0 @@
-# Makefile for pnm tools.
-#
-# Copyright (C) 1989, 1991 by Jef Poskanzer.
-#
-# Permission to use, copy, modify, and distribute this software and its
-# documentation for any purpose and without fee is hereby granted, provided
-# that the above copyright notice appear in all copies and that both that
-# copyright notice and this permission notice appear in supporting
-# documentation.  This software is provided "as is" without express or
-# implied warranty.
-
-# Default values, usually overridden by top-level Makefile.
-#CC =		cc
-CC =		gcc -ansi -pedantic -fcombine-regs -fpcc-struct-return
-#CFLAGS =	-O
-CFLAGS =	-g -w
-#CFLAGS =	-g -O
-TIFFDEF =	-DLIBTIFF
-TIFFINC =	-I@EXTERNALS@/include
-TIFFLIB =	@EXTERNALS@/lib/libtiff.a
-TIFFBINARIES =  tifftopnm pnmtotiff
-TIFFOBJECTS =   tifftopnm.o pnmtotiff.o
-#LDFLAGS =	-s
-LDFLAGS =	
-INSTALLBINARIES =	@EXTERNALS@/bin
-INSTALLSCRIPTS =	$(INSTALLBINARIES)
-INSTALLMANUALS1 =	@EXTERNALS@/man/mann
-SUFFIXMANUALS1 =	1
-INSTALLMANUALS3 =	@EXTERNALS@/man/mann
-SUFFIXMANUALS3 =	3
-INSTALLMANUALS5 =	@EXTERNALS@/man/mann
-SUFFIXMANUALS5 =	5
-MANCP =			cp
-
-PPMDIR =	../ppm
-INCLUDEPPM =	-I$(PPMDIR)
-LIBPPM =	$(PPMDIR)/libppm.a
-DEFPPM =	$(PPMDIR)/ppm.h
-DEFLIBPPM =	$(PPMDIR)/libppm.h
-
-PGMDIR =	../pgm
-INCLUDEPGM =	-I$(PGMDIR)
-LIBPGM =	$(PGMDIR)/libpgm.a
-DEFPGM =	$(PGMDIR)/pgm.h
-DEFLIBPGM =	$(PGMDIR)/libpgm.h
-
-PBMDIR =	../pbm
-INCLUDEPBM =	-I$(PBMDIR)
-LIBPBM =	$(PBMDIR)/libpbm.a
-DEFPBM =	$(PBMDIR)/pbm.h ../pbmplus.h
-DEFLIBPBM =	$(PBMDIR)/libpbm.h
-
-SHELL =		/bin/sh
-INCLUDE =	-I.. $(INCLUDEPPM) $(INCLUDEPGM) $(INCLUDEPBM)
-ALLCFLAGS =	$(CFLAGS) $(INCLUDE) $(TIFFDEF) $(TIFFINC)
-LIBPNM =	libpnm.a
-
-PORTBINARIES =	pnmarith pnmcat pnmconvol pnmcrop pnmcut \
-		pnmdepth pnmenlarge pnmfile pnmflip pnminvert \
-		pnmnoraw pnmpaste pnmscale pnmtile pnmtops \
-		pnmtorast pnmtoxwd rasttopnm xwdtopnm
-MATHBINARIES =	pnmgamma pnmrotate pnmshear
-BINARIES =      $(PORTBINARIES) $(MATHBINARIES) $(TIFFBINARIES)
-SCRIPTS =	anytopnm pnmindex pnmmargin pnmsmooth
-
-PORTOBJECTS =	pnmarith.o pnmcat.o pnmconvol.o pnmcrop.o pnmcut.o \
-		pnmdepth.o pnmenlarge.o pnmfile.o pnmflip.o pnminvert.o \
-		pnmnoraw.o pnmpaste.o pnmscale.o pnmtile.o pnmtops.o \
-		pnmtorast.o pnmtoxwd.o rasttopnm.o xwdtopnm.o \
-		pnmgamma.o pnmrotate.o pnmshear.o
-OBJECTS =	$(PORTOBJECTS) $(TIFFOBJECTS)
-
-MANUALS1 =	$(BINARIES) $(SCRIPTS)
-MANUALS3 =	libpnm
-MANUALS5 =	pnm
-
-
-#all:		binaries
-all:		merge
-#install:	install.bin
-install:	install.merge
-
-
-binaries:	$(BINARIES)
-
-install.bin:	binaries $(SCRIPTS)
-	cd $(INSTALLBINARIES) ; rm -f $(BINARIES)
-	cp $(BINARIES) $(INSTALLBINARIES)
-	cd $(INSTALLSCRIPTS) ; rm -f $(SCRIPTS)
-	cp $(SCRIPTS) $(INSTALLSCRIPTS)
-	cd $(INSTALLSCRIPTS) ; chmod +x $(SCRIPTS)
-
-
-merge:		pnmmerge
-pnmmerge:	pnmmerge.c $(OBJECTS) $(LIBPNM) $(LIBPPM) $(LIBPGM) $(LIBPBM)
-	$(CC) $(ALLCFLAGS) $(LDFLAGS) -o $@ $@.c $(OBJECTS) -lm $(LIBPNM) $(LIBPPM) $(LIBPGM) $(LIBPBM) $(TIFFLIB)
-
-install.merge:	install.pnmmerge $(SCRIPTS)
-install.pnmmerge:	pnmmerge
-	cd $(INSTALLBINARIES) ; rm -f $(BINARIES)
-	cp pnmmerge $(INSTALLBINARIES)
-	cd $(INSTALLBINARIES) ; for i in $(BINARIES) ; do ln pnmmerge $$i ; done
-	rm $(INSTALLBINARIES)/pnmmerge
-	cd $(INSTALLSCRIPTS) ; rm -f $(SCRIPTS)
-	cp $(SCRIPTS) $(INSTALLSCRIPTS)
-	cd $(INSTALLSCRIPTS) ; chmod +x $(SCRIPTS)
-
-
-install.man:
-	for i in $(MANUALS1) ; do \
-	    rm -f $(INSTALLMANUALS1)/$$i.$(SUFFIXMANUALS1) ; \
-	    $(MANCP) $$i.1 $(INSTALLMANUALS1)/$$i.$(SUFFIXMANUALS1) ; \
-	done
-	for i in $(MANUALS3) ; do \
-	    rm -f $(INSTALLMANUALS3)/$$i.$(SUFFIXMANUALS3) ; \
-	    $(MANCP) $$i.3 $(INSTALLMANUALS3)/$$i.$(SUFFIXMANUALS3) ; \
-	done
-	for i in $(MANUALS5) ; do \
-	    rm -f $(INSTALLMANUALS5)/$$i.$(SUFFIXMANUALS5) ; \
-	    $(MANCP) $$i.5 $(INSTALLMANUALS5)/$$i.$(SUFFIXMANUALS5) ; \
-	done
-
-
-# Rules for plain programs.
-$(PORTBINARIES) $(TIFFBINARIES):	pnm.h $(DEFPPM) $(DEFPGM) $(DEFPBM) $(LIBPNM) $(LIBPPM) $(LIBPGM) $(LIBPBM) $(TIFFLIB)
-	$(CC) $(ALLCFLAGS) $(LDFLAGS) -o $@ $@.c $(LIBPNM) $(LIBPPM) $(LIBPGM) $(LIBPBM) $(TIFFLIB)
-
-# Rule for math-dependent programs.
-$(MATHBINARIES):        pnm.h $(DEFPPM) $(DEFPGM) $(DEFPBM) $(LIBPNM) \
-			$(LIBPPM) $(LIBPGM) $(LIBPBM)
-	$(CC) $(ALLCFLAGS) $(LDFLAGS) -o $@ $@.c -lm $(LIBPNM) $(LIBPPM) $(LIBPGM) $(LIBPBM)
-
-# Rule for objects.
-$(OBJECTS):	pnm.h $(DEFPPM) $(DEFPGM) $(DEFPBM)
-	$(CC) $(ALLCFLAGS) "-Dmain=$*_main" -c $*.c
-
-# And libraries.
-$(LIBPBM):
-	cd $(PBMDIR) ; make lib
-$(LIBPGM) FOO:
-	cd $(PGMDIR) ; make lib
-$(LIBPPM) BAR:
-	cd $(PPMDIR) ; make lib
-lib:		$(LIBPNM)
-$(LIBPNM):	libpnm1.o libpnm2.o libpnm3.o libpnm4.o
-	-rm -f $(LIBPNM)
-	ar rc $(LIBPNM) libpnm1.o libpnm2.o libpnm3.o libpnm4.o
-	-ranlib $(LIBPNM)
-
-libpnm1.o:	pnm.h $(DEFPPM) $(DEFPGM) $(DEFPBM) libpnm1.c
-	$(CC) $(ALLCFLAGS) -c libpnm1.c
-libpnm2.o:	pnm.h $(DEFPPM) $(DEFPGM) $(DEFPBM) libpnm2.c $(DEFLIBPPM) \
-		$(DEFLIBPGM) $(DEFLIBPBM)
-	$(CC) $(ALLCFLAGS) -c libpnm2.c
-libpnm3.o:	pnm.h $(DEFPPM) $(DEFPGM) $(DEFPBM) libpnm3.c $(DEFLIBPPM) \
-		$(DEFLIBPGM) $(DEFLIBPBM)
-	$(CC) $(ALLCFLAGS) -c libpnm3.c
-libpnm4.o:	pnm.h $(DEFPPM) $(DEFPGM) $(DEFPBM) rast.h libpnm4.c
-	$(CC) $(ALLCFLAGS) -c libpnm4.c
-
-# Other dependencies.
-pnmarith pnmarith.o:		pnmarith.c
-pnmcat pnmcat.o:		pnmcat.c
-pnmconvol pnmconvol.o:		pnmconvol.c
-pnmcrop pnmcrop.o:		pnmcrop.c
-pnmcut pnmcut.o:		pnmcut.c
-pnmdepth pnmdepth.o:		pnmdepth.c
-pnmenlarge pnmenlarge.o:	pnmenlarge.c
-pnmfile pnmfile.o:		pnmfile.c
-pnmflip pnmflip.o:		pnmflip.c
-pnmgamma pnmgamma.o:		pnmgamma.c
-pnminvert pnminvert.o:		pnminvert.c
-pnmnoraw pnmnoraw.o:		pnmnoraw.c
-pnmpaste pnmpaste.o:		pnmpaste.c
-pnmrotate pnmrotate.o:		pnmrotate.c
-pnmscale pnmscale.o:		pnmscale.c
-pnmshear pnmshear.o:		pnmshear.c
-pnmtile pnmtile.o:		pnmtile.c
-pnmtops pnmtops.o:		pnmtops.c
-pnmtorast pnmtorast.o:		pnmtorast.c rast.h
-pnmtotiff pnmtotiff.o:		pnmtotiff.c
-pnmtoxwd pnmtoxwd.o:		pnmtoxwd.c x11wd.h
-rasttopnm rasttopnm.o:		rasttopnm.c rast.h
-tifftopnm tifftopnm.o:		tifftopnm.c
-xwdtopnm xwdtopnm.o:		xwdtopnm.c x10wd.h x11wd.h
-
-clean:
-	-rm -f *.o *.a *.cat core $(BINARIES) pnmmerge
diff --git a/exsrc/src/png/pngconf.h b/exsrc/src/png/pngconf.h
deleted file mode 100644
index e185438ca466e4e26b7c7a86b40859bd359fc23a..0000000000000000000000000000000000000000
--- a/exsrc/src/png/pngconf.h
+++ /dev/null
@@ -1,632 +0,0 @@
-
-/* pngconf.h - machine configurable file for libpng
- *
- * libpng version 1.5.1 - February 3, 2011
- *
- * Copyright (c) 1998-2011 Glenn Randers-Pehrson
- * (Version 0.96 Copyright (c) 1996, 1997 Andreas Dilger)
- * (Version 0.88 Copyright (c) 1995, 1996 Guy Eric Schalnat, Group 42, Inc.)
- *
- * This code is released under the libpng license.
- * For conditions of distribution and use, see the disclaimer
- * and license in png.h
- *
- */
-
-/* Any machine specific code is near the front of this file, so if you
- * are configuring libpng for a machine, you may want to read the section
- * starting here down to where it starts to typedef png_color, png_text,
- * and png_info.
- */
-
-#ifndef PNGCONF_H
-#define PNGCONF_H
-
-/* PNG_NO_LIMITS_H may be used to turn off the use of the standard C
- * definition file for machine specific limits; this may impact the
- * correctness of the definitions below (see uses of INT_MAX).
- */
-#ifndef PNG_NO_LIMITS_H
-#  include <limits.h>
-#endif
-
-/* For the memory copy APIs (i.e. the standard definitions of these),
- * because this file defines png_memcpy and so on the base APIs must
- * be defined here.
- */
-#ifdef BSD
-#  include <strings.h>
-#else
-#  include <string.h>
-#endif
-
-/* For png_FILE_p - this provides the standard definition of a
- * FILE
- */
-#ifdef PNG_STDIO_SUPPORTED
-#  include <stdio.h>
-#endif
-
-/* This controls optimization of the reading of 16 and 32 bit values
- * from PNG files.  It can be set on a per-app-file basis - it
- * just changes whether a macro is used or the function is called.
- * The library builder sets the default, if read functions are not
- * built into the library the macro implementation is forced on.
- */
-#ifndef PNG_READ_INT_FUNCTIONS_SUPPORTED
-#  define PNG_USE_READ_MACROS
-#endif
-#if !defined(PNG_NO_USE_READ_MACROS) && !defined(PNG_USE_READ_MACROS)
-#  if PNG_DEFAULT_READ_MACROS
-#    define PNG_USE_READ_MACROS
-#  endif
-#endif
-
-/* COMPILER SPECIFIC OPTIONS.
- *
- * These options are provided so that a variety of difficult compilers
- * can be used.  Some are fixed at build time (e.g. PNG_API_RULE
- * below) but still have compiler specific implementations, others
- * may be changed on a per-file basis when compiling against libpng.
- */
-
-/* The PNGARG macro protects us against machines that don't have function
- * prototypes (ie K&R style headers).  If your compiler does not handle
- * function prototypes, define this macro and use the included ansi2knr.
- * I've always been able to use _NO_PROTO as the indicator, but you may
- * need to drag the empty declaration out in front of here, or change the
- * ifdef to suit your own needs.
- */
-#ifndef PNGARG
-
-#  ifdef OF /* zlib prototype munger */
-#    define PNGARG(arglist) OF(arglist)
-#  else
-
-#    ifdef _NO_PROTO
-#      define PNGARG(arglist) ()
-#    else
-#      define PNGARG(arglist) arglist
-#    endif /* _NO_PROTO */
-
-#  endif /* OF */
-
-#endif /* PNGARG */
-
-/* Function calling conventions.
- * =============================
- * Normally it is not necessary to specify to the compiler how to call
- * a function - it just does it - however on x86 systems derived from
- * Microsoft and Borland C compilers ('IBM PC', 'DOS', 'Windows' systems
- * and some others) there are multiple ways to call a function and the
- * default can be changed on the compiler command line.  For this reason
- * libpng specifies the calling convention of every exported function and
- * every function called via a user supplied function pointer.  This is
- * done in this file by defining the following macros:
- *
- * PNGAPI    Calling convention for exported functions.
- * PNGCBAPI  Calling convention for user provided (callback) functions.
- * PNGCAPI   Calling convention used by the ANSI-C library (required
- *           for longjmp callbacks and sometimes used internally to
- *           specify the calling convention for zlib).
- *
- * These macros should never be overridden.  If it is necessary to
- * change calling convention in a private build this can be done
- * by setting PNG_API_RULE (which defaults to 0) to one of the values
- * below to select the correct 'API' variants.
- *
- * PNG_API_RULE=0 Use PNGCAPI - the 'C' calling convention - throughout.
- *                This is correct in every known environment.
- * PNG_API_RULE=1 Use the operating system convention for PNGAPI and
- *                the 'C' calling convention (from PNGCAPI) for
- *                callbacks (PNGCBAPI).  This is no longer required
- *                in any known environment - if it has to be used
- *                please post an explanation of the problem to the
- *                libpng mailing list.
- *
- * These cases only differ if the operating system does not use the C
- * calling convention, at present this just means the above cases
- * (x86 DOS/Windows systems) and, even then, this does not apply to
- * Cygwin running on those systems.
- *
- * Note that the value must be defined in pnglibconf.h so that what
- * the application uses to call the library matches the conventions
- * set when building the library.
- */
-
-/* Symbol export
- * =============
- * When building a shared library it is almost always necessary to tell
- * the compiler which symbols to export.  The png.h macro 'PNG_EXPORT'
- * is used to mark the symbols.  On some systems these symbols can be
- * extracted at link time and need no special processing by the compiler,
- * on other systems the symbols are flagged by the compiler and just
- * the declaration requires a special tag applied (unfortunately) in a
- * compiler dependent way.  Some systems can do either.
- *
- * A small number of older systems also require a symbol from a DLL to
- * be flagged to the program that calls it.  This is a problem because
- * we do not know in the header file included by application code that
- * the symbol will come from a shared library, as opposed to a statically
- * linked one.  For this reason the application must tell us by setting
- * the magic flag PNG_USE_DLL to turn on the special processing before
- * it includes png.h.
- *
- * Four additional macros are used to make this happen:
- *
- * PNG_IMPEXP The magic (if any) to cause a symbol to be exported from
- *            the build or imported if PNG_USE_DLL is set - compiler
- *            and system specific.
- *
- * PNG_EXPORT_TYPE(type) A macro that pre or appends PNG_IMPEXP to
- *                       'type', compiler specific.
- *
- * PNG_DLL_EXPORT Set to the magic to use during a libpng build to
- *                make a symbol exported from the DLL.
- *
- * PNG_DLL_IMPORT Set to the magic to force the libpng symbols to come
- *                from a DLL - used to define PNG_IMPEXP when
- *                PNG_USE_DLL is set.
- */
-
-/* System specific discovery.
- * ==========================
- * This code is used at build time to find PNG_IMPEXP, the API settings
- * and PNG_EXPORT_TYPE(), it may also set a macro to indicate the DLL
- * import processing is possible.  On Windows/x86 systems it also sets
- * compiler-specific macros to the values required to change the calling
- * conventions of the various functions.
- */
-#if ( defined(_Windows) || defined(_WINDOWS) || defined(WIN32) ||\
-      defined(_WIN32) || defined(__WIN32__) || defined(__CYGWIN__) ) &&\
-    ( defined(_X86_) || defined(_X64_) || defined(_M_IX86) ||\
-      defined(_M_X64) || defined(_M_IA64) )
-  /* Windows system (DOS doesn't support DLLs) running on x86/x64.  Includes
-   * builds under Cygwin or MinGW.  Also includes Watcom builds but these need
-   * special treatment because they are not compatible with GCC or Visual C
-   * because of different calling conventions.
-   */
-#  if PNG_API_RULE == 2
-    /* If this line results in an error, either because __watcall is not
-     * understood or because of a redefine just below, you cannot use *this*
-     * build of the library with the compiler you are using.  *This* build was
-     * built using Watcom and applications must also be built using Watcom!
-     */
-#    define PNGCAPI __watcall
-#  endif
-
-#  if defined(__GNUC__) || (defined (_MSC_VER) && (_MSC_VER >= 800))
-#    define PNGCAPI __cdecl
-#    if PNG_API_RULE == 1
-#      define PNGAPI __stdcall
-#    endif
-#  else
-    /* An older compiler, or one not detected (erroneously) above,
-     * if necessary override on the command line to get the correct
-     * variants for the compiler.
-     */
-#    ifndef PNGCAPI
-#      define PNGCAPI _cdecl
-#    endif
-#    if PNG_API_RULE == 1 && !defined(PNGAPI)
-#      define PNGAPI _stdcall
-#    endif
-#  endif /* compiler/api */
-  /* NOTE: PNGCBAPI always defaults to PNGCAPI. */
-
-#  if defined(PNGAPI) && !defined(PNG_USER_PRIVATEBUILD)
-   ERROR: PNG_USER_PRIVATEBUILD must be defined if PNGAPI is changed
-#  endif
-
-#  if (defined(_MSC_VER) && _MSC_VER < 800) ||\
-      (defined(__BORLANDC__) && __BORLANDC__ < 0x500)
-    /* older Borland and MSC
-     * compilers used '__export' and required this to be after
-     * the type.
-     */
-#    ifndef PNG_EXPORT_TYPE
-#      define PNG_EXPORT_TYPE(type) type PNG_IMPEXP
-#    endif
-#    define PNG_DLL_EXPORT __export
-#  else /* newer compiler */
-#    define PNG_DLL_EXPORT __declspec(dllexport)
-#    ifndef PNG_DLL_IMPORT
-#      define PNG_DLL_IMPORT __declspec(dllimport)
-#    endif
-#  endif /* compiler */
-
-#else /* !Windows/x86 */
-#  if (defined(__IBMC__) || defined(__IBMCPP__)) && defined(__OS2__)
-#    define PNGAPI _System
-#  else /* !Windows/x86 && !OS/2 */
-    /* Use the defaults, or define PNG*API on the command line (but
-     * this will have to be done for every compile!)
-     */
-#  endif /* other system, !OS/2 */
-#endif /* !Windows/x86 */
-
-/* Now do all the defaulting. */
-#ifndef PNGCAPI
-#  define PNGCAPI
-#endif
-#ifndef PNGCBAPI
-#  define PNGCBAPI PNGCAPI
-#endif
-#ifndef PNGAPI
-#  define PNGAPI PNGCAPI
-#endif
-
-/* The default for PNG_IMPEXP depends on whether the library is
- * being built or used.
- */
-#ifndef PNG_IMPEXP
-#  ifdef PNGLIB_BUILD
-    /* Building the library */
-#    if (defined(DLL_EXPORT)/*from libtool*/ ||\
-        defined(_WINDLL) || defined(_DLL) || defined(__DLL__) ||\
-        defined(_USRDLL) ||\
-        defined(PNG_BUILD_DLL)) && defined(PNG_DLL_EXPORT)
-      /* Building a DLL. */
-#      define PNG_IMPEXP PNG_DLL_EXPORT
-#    endif /* DLL */
-#  else
-    /* Using the library */
-#    if defined(PNG_USE_DLL) && defined(PNG_DLL_IMPORT)
-      /* This forces use of a DLL, disallowing static linking */
-#      define PNG_IMPEXP PNG_DLL_IMPORT
-#    endif
-#  endif
-
-#  ifndef PNG_IMPEXP
-#    define PNG_IMPEXP
-#  endif
-#endif
-
-/* The following complexity is concerned with getting the 'attributes' of the
- * declared function in the correct place.  This potentially requires a separate
- * PNG_EXPORT function for every compiler.
- */
-#ifndef PNG_FUNCTION
-#  if defined (__GNUC__) && !defined(__clang__)
-#     define PNG_FUNCTION(type, name, args, attributes)\
-         attributes type name args
-#  else /* !GNUC */
-#     ifdef _MSC_VER
-#        define PNG_FUNCTION(type, name, args, attributes)\
-         attributes type name args
-#     else /* !MSC */
-#        define PNG_FUNCTION(type, name, args, attributes)\
-            type name args
-#     endif
-#  endif
-#endif
-
-#ifndef PNG_EXPORT_TYPE
-#  define PNG_EXPORT_TYPE(type) PNG_IMPEXP type
-#endif
-
-   /* The ordinal value is only relevant when preprocessing png.h for symbol
-    * table entries, so we discard it here.  See the .dfn files in the
-    * scripts directory.
-    */
-#ifndef PNG_EXPORTA
-#  define PNG_EXPORTA(ordinal, type, name, args, attributes)\
-      extern PNG_FUNCTION(PNG_EXPORT_TYPE(type),(PNGAPI name),PNGARG(args),\
-         attributes)
-#endif
-
-#define PNG_EXPORT(ordinal, type, name, args)\
-   PNG_EXPORTA(ordinal, type, name, args, )
-
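To see the layering in action, take one real declaration from png.h; with PNG_IMPEXP and PNGAPI empty and PNGARG a pass-through (the defaults established above), the whole stack reduces to a plain extern prototype:

```c
/* In png.h: */
PNG_EXPORT(1, png_uint_32, png_access_version_number, (void));

/* Preprocessed with all defaults (empty PNG_IMPEXP/PNGAPI, PNGARG passing
 * its argument list through unchanged), this is simply:
 */
extern png_uint_32 (png_access_version_number) (void);
```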
-/* Use PNG_REMOVED to comment out a removed interface. */
-#ifndef PNG_REMOVED
-#  define PNG_REMOVED(ordinal, type, name, args, attributes)
-#endif
-
-#ifndef PNG_CALLBACK
-#  define PNG_CALLBACK(type, name, args, attributes)\
-   type (PNGCBAPI name) PNGARG(args) attributes
-#endif
-
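PNG_CALLBACK is the callback-side counterpart: png.h uses it to declare the function-pointer typedefs for user callbacks so they get the PNGCBAPI calling convention. For example:

```c
/* From png.h: */
typedef PNG_CALLBACK(void, *png_error_ptr, (png_structp, png_const_charp));

/* With PNGCBAPI empty and PNGARG a pass-through this is just: */
typedef void (*png_error_ptr)(png_structp, png_const_charp);
```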
-/* Support for compiler specific function attributes.  These are used
- * so that where compiler support is available incorrect use of API
- * functions in png.h will generate compiler warnings.
- *
- * Added at libpng-1.2.41.
- */
-
-#ifndef PNG_NO_PEDANTIC_WARNINGS
-#  ifndef PNG_PEDANTIC_WARNINGS_SUPPORTED
-#    define PNG_PEDANTIC_WARNINGS_SUPPORTED
-#  endif
-#endif
-
-#ifdef PNG_PEDANTIC_WARNINGS_SUPPORTED
-  /* Support for compiler specific function attributes.  These are used
-   * so that where compiler support is available incorrect use of API
-   * functions in png.h will generate compiler warnings.  Added at libpng
-   * version 1.2.41.
-   */
-#  if defined (__GNUC__) && !defined(__clang__)
-#    ifndef PNG_USE_RESULT
-#      define PNG_USE_RESULT __attribute__((__warn_unused_result__))
-#    endif
-#    ifndef PNG_NORETURN
-#      define PNG_NORETURN   __attribute__((__noreturn__))
-#    endif
-#    ifndef PNG_PTR_NORETURN
-#      define PNG_PTR_NORETURN   __attribute__((__noreturn__))
-#    endif
-#    ifndef PNG_ALLOCATED
-#      define PNG_ALLOCATED  __attribute__((__malloc__))
-#    endif
-
-    /* This specifically protects structure members that should only be
-     * accessed from within the library, therefore should be empty during
-     * a library build.
-     */
-#    ifndef PNGLIB_BUILD
-#      ifndef PNG_DEPRECATED
-#        define PNG_DEPRECATED __attribute__((__deprecated__))
-#      endif
-#      ifndef PNG_DEPSTRUCT
-#        define PNG_DEPSTRUCT  __attribute__((__deprecated__))
-#      endif
-#      ifndef PNG_PRIVATE
-#        if 0 /* Doesn't work so we use deprecated instead */
-#          define PNG_PRIVATE \
-            __attribute__((warning("This function is not exported by libpng.")))
-#        else
-#          define PNG_PRIVATE \
-            __attribute__((__deprecated__))
-#        endif
-#      endif /* PNG_PRIVATE */
-#    endif /* PNGLIB_BUILD */
-#  endif /* __GNUC__ */
-#  ifdef _MSC_VER /* may need to check value */
-#    ifndef PNG_USE_RESULT
-#      define PNG_USE_RESULT /*not supported*/
-#    endif
-#    ifndef PNG_NORETURN
-#      define PNG_NORETURN   __declspec(noreturn)
-#    endif
-#    ifndef PNG_PTR_NORETURN
-#      define PNG_PTR_NORETURN /*not supported*/
-#    endif
-#    ifndef PNG_ALLOCATED
-#      define PNG_ALLOCATED __declspec(restrict)
-#    endif
-
-    /* This specifically protects structure members that should only be
-     * accessed from within the library, therefore should be empty during
-     * a library build.
-     */
-#    ifndef PNGLIB_BUILD
-#      ifndef PNG_DEPRECATED
-#        define PNG_DEPRECATED __declspec(deprecated)
-#      endif
-#      ifndef PNG_DEPSTRUCT
-#        define PNG_DEPSTRUCT  __declspec(deprecated)
-#      endif
-#      ifndef PNG_PRIVATE
-#        define PNG_PRIVATE __declspec(deprecated)
-#      endif /* PNG_PRIVATE */
-#    endif /* PNGLIB_BUILD */
-#  endif /* _MSC_VER */
-#endif /* PNG_PEDANTIC_WARNINGS */
-
-#ifndef PNG_DEPRECATED
-#  define PNG_DEPRECATED  /* Use of this function is deprecated */
-#endif
-#ifndef PNG_USE_RESULT
-#  define PNG_USE_RESULT  /* The result of this function must be checked */
-#endif
-#ifndef PNG_NORETURN
-#  define PNG_NORETURN    /* This function does not return */
-#endif
-#ifndef PNG_ALLOCATED
-#  define PNG_ALLOCATED   /* The result of the function is new memory */
-#endif
-#ifndef PNG_DEPSTRUCT
-#  define PNG_DEPSTRUCT   /* Access to this struct member is deprecated */
-#endif
-#ifndef PNG_PRIVATE
-#  define PNG_PRIVATE     /* This is a private libpng function */
-#endif
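The effect of these attribute macros is easiest to see in isolation. Here is a standalone sketch (independent of libpng) of the GCC branch above; compile with `gcc -Wall`:

```c
/* Standalone demonstration of __warn_unused_result__, the attribute that
 * PNG_USE_RESULT expands to under GCC.
 */
#if defined(__GNUC__) && !defined(__clang__)
#  define USE_RESULT __attribute__((__warn_unused_result__))
#else
#  define USE_RESULT
#endif

USE_RESULT static int checked_read(void)
{
   return 0;
}

int main(void)
{
   checked_read();          /* gcc warns: return value ignored */
   return checked_read();   /* fine: the result is used */
}
```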
-#ifndef PNG_FP_EXPORT     /* A floating point API. */
-#  ifdef PNG_FLOATING_POINT_SUPPORTED
-#     define PNG_FP_EXPORT(ordinal, type, name, args)\
-         PNG_EXPORT(ordinal, type, name, args)
-#  else                   /* No floating point APIs */
-#     define PNG_FP_EXPORT(ordinal, type, name, args)
-#  endif
-#endif
-#ifndef PNG_FIXED_EXPORT  /* A fixed point API. */
-#  ifdef PNG_FIXED_POINT_SUPPORTED
-#     define PNG_FIXED_EXPORT(ordinal, type, name, args)\
-         PNG_EXPORT(ordinal, type, name, args)
-#  else                   /* No fixed point APIs */
-#     define PNG_FIXED_EXPORT(ordinal, type, name, args)
-#  endif
-#endif
-
-/* The following uses const char * instead of char * for error
- * and warning message functions, so some compilers won't complain.
- * If you do not want to use const, define PNG_NO_CONST here.
- *
- * This should not change how the APIs are called, so it can be done
- * on a per-file basis in the application.
- */
-#ifndef PNG_CONST
-#  ifndef PNG_NO_CONST
-#    define PNG_CONST const
-#  else
-#    define PNG_CONST
-#  endif
-#endif
-
-/* Some typedefs to get us started.  These should be safe on most of the
- * common platforms.  The typedefs should be at least as large as the
- * numbers suggest (a png_uint_32 must be at least 32 bits long), but they
- * don't have to be exactly that size.  Some compilers dislike passing
- * unsigned shorts as function parameters, so you may be better off using
- * unsigned int for png_uint_16.
- */
-
-#if defined(INT_MAX) && (INT_MAX > 0x7ffffffeL)
-typedef unsigned int png_uint_32;
-typedef int png_int_32;
-#else
-typedef unsigned long png_uint_32;
-typedef long png_int_32;
-#endif
-typedef unsigned short png_uint_16;
-typedef short png_int_16;
-typedef unsigned char png_byte;
-
-#ifdef PNG_NO_SIZE_T
-typedef unsigned int png_size_t;
-#else
-typedef size_t png_size_t;
-#endif
-#define png_sizeof(x) (sizeof (x))
-
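The "at least as large as the numbers suggest" guarantee can be checked at compile time. A sketch using the classic negative-array-size trick (C89-friendly), assuming the typedefs above are in scope:

```c
#include <limits.h>   /* CHAR_BIT */

/* Each typedef fails to compile if the corresponding type is too narrow. */
typedef char check_png_uint_32[(sizeof (png_uint_32) * CHAR_BIT >= 32) ? 1 : -1];
typedef char check_png_uint_16[(sizeof (png_uint_16) * CHAR_BIT >= 16) ? 1 : -1];
typedef char check_png_byte   [(sizeof (png_byte)    * CHAR_BIT >=  8) ? 1 : -1];
```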
-/* The following is needed for medium model support.  It cannot be in the
- * pngpriv.h header.  Needs modification for other compilers besides
- * MSC.  Model independent support declares all arrays and pointers to be
- * large using the far keyword.  The zlib version used must also support
- * model independent data.  As of version zlib 1.0.4, the necessary changes
- * have been made in zlib.  The USE_FAR_KEYWORD define triggers other
- * changes that are needed. (Tim Wegner)
- */
-
-/* Separate compiler dependencies (the problem here is that zlib.h always
- * defines FAR). (SJT)
- */
-#ifdef __BORLANDC__
-#  if defined(__LARGE__) || defined(__HUGE__) || defined(__COMPACT__)
-#    define LDATA 1
-#  else
-#    define LDATA 0
-#  endif
-  /* GRR:  why is Cygwin in here?  Cygwin is not Borland C... */
-#  if !defined(__WIN32__) && !defined(__FLAT__) && !defined(__CYGWIN__)
-#    define PNG_MAX_MALLOC_64K /* only used in build */
-#    if (LDATA != 1)
-#      ifndef FAR
-#        define FAR __far
-#      endif
-#      define USE_FAR_KEYWORD
-#    endif   /* LDATA != 1 */
-         /* Possibly useful for moving data out of default segment.
-          * Uncomment it if you want. Could also define FARDATA as
-          * const if your compiler supports it. (SJT)
-#        define FARDATA FAR
-          */
-#  endif  /* __WIN32__, __FLAT__, __CYGWIN__ */
-#endif   /* __BORLANDC__ */
-
-
-/* Suggest testing for specific compiler first before testing for
- * FAR.  The Watcom compiler defines both __MEDIUM__ and M_I86MM,
- * making reliance on certain keywords suspect. (SJT)
- */
-
-/* MSC Medium model */
-#ifdef FAR
-#  ifdef M_I86MM
-#    define USE_FAR_KEYWORD
-#    define FARDATA FAR
-#    include <dos.h>
-#  endif
-#endif
-
-/* SJT: default case */
-#ifndef FAR
-#  define FAR
-#endif
-
-/* At this point FAR is always defined */
-#ifndef FARDATA
-#  define FARDATA
-#endif
-
-/* Typedef for floating-point numbers that are converted
- * to fixed-point with a multiple of 100,000, e.g., gamma
- */
-typedef png_int_32 png_fixed_point;
-
-/* Add typedefs for pointers */
-typedef void                      FAR * png_voidp;
-typedef PNG_CONST void            FAR * png_const_voidp;
-typedef png_byte                  FAR * png_bytep;
-typedef PNG_CONST png_byte        FAR * png_const_bytep;
-typedef png_uint_32               FAR * png_uint_32p;
-typedef PNG_CONST png_uint_32     FAR * png_const_uint_32p;
-typedef png_int_32                FAR * png_int_32p;
-typedef PNG_CONST png_int_32      FAR * png_const_int_32p;
-typedef png_uint_16               FAR * png_uint_16p;
-typedef PNG_CONST png_uint_16     FAR * png_const_uint_16p;
-typedef png_int_16                FAR * png_int_16p;
-typedef PNG_CONST png_int_16      FAR * png_const_int_16p;
-typedef char                      FAR * png_charp;
-typedef PNG_CONST char            FAR * png_const_charp;
-typedef png_fixed_point           FAR * png_fixed_point_p;
-typedef PNG_CONST png_fixed_point FAR * png_const_fixed_point_p;
-typedef png_size_t                FAR * png_size_tp;
-typedef PNG_CONST png_size_t      FAR * png_const_size_tp;
-
-#ifdef PNG_STDIO_SUPPORTED
-typedef FILE            * png_FILE_p;
-#endif
-
-#ifdef PNG_FLOATING_POINT_SUPPORTED
-typedef double           FAR * png_doublep;
-typedef PNG_CONST double FAR * png_const_doublep;
-#endif
-
-/* Pointers to pointers; i.e. arrays */
-typedef png_byte        FAR * FAR * png_bytepp;
-typedef png_uint_32     FAR * FAR * png_uint_32pp;
-typedef png_int_32      FAR * FAR * png_int_32pp;
-typedef png_uint_16     FAR * FAR * png_uint_16pp;
-typedef png_int_16      FAR * FAR * png_int_16pp;
-typedef PNG_CONST char  FAR * FAR * png_const_charpp;
-typedef char            FAR * FAR * png_charpp;
-typedef png_fixed_point FAR * FAR * png_fixed_point_pp;
-#ifdef PNG_FLOATING_POINT_SUPPORTED
-typedef double          FAR * FAR * png_doublepp;
-#endif
-
-/* Pointers to pointers to pointers; i.e., pointer to array */
-typedef char            FAR * FAR * FAR * png_charppp;
-
-/* png_alloc_size_t is guaranteed to be no smaller than png_size_t,
- * and no smaller than png_uint_32.  Casts from png_size_t or png_uint_32
- * to png_alloc_size_t are not necessary; in fact, it is recommended
- * not to use them at all so that the compiler can complain when something
- * turns out to be problematic.
- * Casts in the other direction (from png_alloc_size_t to png_size_t or
- * png_uint_32) should be explicitly applied; however, we do not expect
- * to encounter practical situations that require such conversions.
- */
-#if defined(__TURBOC__) && !defined(__FLAT__)
-   typedef unsigned long png_alloc_size_t;
-#else
-#  if defined(_MSC_VER) && defined(MAXSEG_64K)
-     typedef unsigned long    png_alloc_size_t;
-#  else
-     /* This is an attempt to detect an old Windows system where (int) is
-      * actually 16 bits; in that case png_malloc must have an argument with a
-      * bigger size to accommodate the requirements of the library.
-      */
-#    if (defined(_Windows) || defined(_WINDOWS) || defined(_WINDOWS_)) && \
-        (!defined(INT_MAX) || INT_MAX <= 0x7ffffffeL)
-       typedef DWORD         png_alloc_size_t;
-#    else
-       typedef png_size_t    png_alloc_size_t;
-#    endif
-#  endif
-#endif
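A short sketch of the cast discipline described above: conversions that widen to png_alloc_size_t are left implicit, while the narrowing direction gets an explicit cast.

```c
static png_alloc_size_t image_bytes(png_uint_32 rowbytes, png_uint_32 height)
{
   png_alloc_size_t total = rowbytes;  /* no cast: widening is implicit */
   total *= height;
   return total;
}

static png_size_t as_size(png_alloc_size_t n)
{
   return (png_size_t)n;               /* explicit cast when narrowing */
}
```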
-
-#endif /* PNGCONF_H */
diff --git a/exsrc/src/readline/shobj-conf b/exsrc/src/readline/shobj-conf
deleted file mode 100644
index 663869a81995a1f4a2f27676e3abe83b4abb3763..0000000000000000000000000000000000000000
--- a/exsrc/src/readline/shobj-conf
+++ /dev/null
@@ -1,579 +0,0 @@
-#! /bin/sh
-#
-# shobj-conf -- output a series of variable assignments to be substituted
-#		into a Makefile by configure which specify system-dependent
-#		information for creating shared objects that may be loaded
-#		into bash with `enable -f'
-#
-# usage: shobj-conf [-C compiler] -c host_cpu -o host_os -v host_vendor
-#
-# Chet Ramey
-# chet@po.cwru.edu
-
-#   Copyright (C) 1996-2009 Free Software Foundation, Inc.
-#
-#   This file is part of GNU Bash, the Bourne Again SHell.
-#
-#   This program is free software: you can redistribute it and/or modify
-#   it under the terms of the GNU General Public License as published by
-#   the Free Software Foundation, either version 3 of the License, or
-#   (at your option) any later version.
-#
-#   This program is distributed in the hope that it will be useful,
-#   but WITHOUT ANY WARRANTY; without even the implied warranty of
-#   MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
-#   GNU General Public License for more details.
-#
-#   You should have received a copy of the GNU General Public License
-#   along with this program.  If not, see <http://www.gnu.org/licenses/>.
-#
-
-#
-# defaults
-#
-SHOBJ_STATUS=supported
-SHLIB_STATUS=supported
-
-SHOBJ_CC=cc
-SHOBJ_CFLAGS=
-SHOBJ_LD=
-SHOBJ_LDFLAGS=
-SHOBJ_XLDFLAGS=
-SHOBJ_LIBS=
-
-SHLIB_XLDFLAGS=
-SHLIB_LIBS='-ltermcap'
-
-SHLIB_DOT='.'
-SHLIB_LIBPREF='lib'
-SHLIB_LIBSUFF='so'
-
-SHLIB_LIBVERSION='$(SHLIB_LIBSUFF)'
-SHLIB_DLLVERSION='$(SHLIB_MAJOR)'
-
-PROGNAME=`basename $0`
-USAGE="$PROGNAME [-C compiler] -c host_cpu -o host_os -v host_vendor"
-
-while [ $# -gt 0 ]; do
-	case "$1" in
-	-C)	shift; SHOBJ_CC="$1"; shift ;;
-	-c)	shift; host_cpu="$1"; shift ;;
-	-o)	shift; host_os="$1"; shift ;;
-	-v)	shift; host_vendor="$1"; shift ;;
-	*)	echo "$USAGE" >&2 ; exit 2;;
-	esac
-done
-
-case "${host_os}-${SHOBJ_CC}-${host_vendor}" in
-sunos4*-*gcc*)
-	SHOBJ_CFLAGS=-fpic
-	SHOBJ_LD=/usr/bin/ld
-	SHOBJ_LDFLAGS='-assert pure-text'
-
-	SHLIB_LIBVERSION='$(SHLIB_LIBSUFF).$(SHLIB_MAJOR)$(SHLIB_MINOR)'
-	;;
-
-sunos4*)
-	SHOBJ_CFLAGS=-pic
-	SHOBJ_LD=/usr/bin/ld
-	SHOBJ_LDFLAGS='-assert pure-text'
-
-	SHLIB_LIBVERSION='$(SHLIB_LIBSUFF).$(SHLIB_MAJOR)$(SHLIB_MINOR)'
-	;;
-
-sunos5*-*gcc*|solaris2*-*gcc*)
-	SHOBJ_LD='${CC}'
-	ld_used=`gcc -print-prog-name=ld`
-	if ${ld_used} -V 2>&1 | grep GNU >/dev/null 2>&1; then
-		# This line works for the GNU ld
-		SHOBJ_LDFLAGS='-shared -Wl,-h,$@'
-		# http://sourceware.org/ml/binutils/2001-08/msg00361.html
-		SHOBJ_CFLAGS=-fPIC
-	else
-		# This line works for the Solaris linker in /usr/ccs/bin/ld
-		SHOBJ_LDFLAGS='-shared -Wl,-i -Wl,-h,$@'
-		SHOBJ_CFLAGS=-fpic
-	fi
-
-#	SHLIB_XLDFLAGS='-R $(libdir)'
-	SHLIB_LIBVERSION='$(SHLIB_LIBSUFF).$(SHLIB_MAJOR)'
-	;;
-
-sunos5*|solaris2*)
-	SHOBJ_CFLAGS='-K pic'
-	SHOBJ_LD=/usr/ccs/bin/ld
-	SHOBJ_LDFLAGS='-G -dy -z text -i -h $@'
-
-#	SHLIB_XLDFLAGS='-R $(libdir)'
-	SHLIB_LIBVERSION='$(SHLIB_LIBSUFF).$(SHLIB_MAJOR)'
-	;;
-
-# All versions of Linux (including Gentoo/FreeBSD) or the semi-mythical GNU Hurd.
-linux*-*|gnu*-*|k*bsd*-gnu-*|freebsd*-gentoo)
-	SHOBJ_CFLAGS=-fPIC
-	SHOBJ_LD='${CC}'
-	SHOBJ_LDFLAGS='-shared -Wl,-soname,$@'
-
-	SHLIB_XLDFLAGS='-Wl,-rpath,$(libdir) -Wl,-soname,`basename $@ $(SHLIB_MINOR)`'
-	SHLIB_LIBVERSION='$(SHLIB_LIBSUFF).$(SHLIB_MAJOR)$(SHLIB_MINOR)'
-	;;
-
-freebsd2*)
-	SHOBJ_CFLAGS=-fpic
-	SHOBJ_LD=ld
-	SHOBJ_LDFLAGS='-x -Bshareable'
-
-	SHLIB_XLDFLAGS='-R$(libdir)'
-	SHLIB_LIBVERSION='$(SHLIB_LIBSUFF).$(SHLIB_MAJOR)$(SHLIB_MINOR)'
-	;;
-
-# FreeBSD-3.x ELF
-freebsd3*|freebsdaout*)
-	SHOBJ_CFLAGS=-fPIC
-	SHOBJ_LD='${CC}'
-
-	if [ -x /usr/bin/objformat ] && [ "`/usr/bin/objformat`" = "elf" ]; then
-		SHOBJ_LDFLAGS='-shared -Wl,-soname,$@'
-
-		SHLIB_XLDFLAGS='-Wl,-rpath,$(libdir)'
-		SHLIB_LIBVERSION='$(SHLIB_LIBSUFF).$(SHLIB_MAJOR)'
-	else
-		SHOBJ_LDFLAGS='-shared'
-
-		SHLIB_XLDFLAGS='-R$(libdir)'
-		SHLIB_LIBVERSION='$(SHLIB_LIBSUFF).$(SHLIB_MAJOR)$(SHLIB_MINOR)'
-	fi
-	;;
-
-# FreeBSD-4.x and later have only ELF
-freebsd[4-9]*|freebsdelf*|dragonfly*)
-	SHOBJ_CFLAGS=-fPIC
-	SHOBJ_LD='${CC}'
-
-	SHOBJ_LDFLAGS='-shared -Wl,-soname,$@'
-	SHLIB_XLDFLAGS='-Wl,-rpath,$(libdir)'
-
-	SHLIB_LIBVERSION='$(SHLIB_LIBSUFF).$(SHLIB_MAJOR)'
-	;;
-
-# Darwin/MacOS X
-darwin[89]*|darwin10*)
-	SHOBJ_STATUS=supported
-	SHLIB_STATUS=supported
-	
-	SHOBJ_CFLAGS='-fno-common'
-
-	SHOBJ_LD='MACOSX_DEPLOYMENT_TARGET=10.3 ${CC}'
-
-	SHLIB_LIBVERSION='$(SHLIB_MAJOR)$(SHLIB_MINOR).$(SHLIB_LIBSUFF)'
-	SHLIB_LIBSUFF='dylib'
-
-	SHOBJ_LDFLAGS='-dynamiclib -dynamic -undefined dynamic_lookup -arch_only `/usr/bin/arch`'
-	SHLIB_XLDFLAGS='-dynamiclib -arch_only `/usr/bin/arch` -install_name $(libdir)/$@ -current_version $(SHLIB_MAJOR)$(SHLIB_MINOR) -compatibility_version $(SHLIB_MAJOR) -v'
-
-	SHLIB_LIBS='-lncurses'	# see if -lcurses works on MacOS X 10.1 
-	;;
-
-darwin*|macosx*)
-	SHOBJ_STATUS=unsupported
-	SHLIB_STATUS=supported
-
-	SHOBJ_CFLAGS='-fno-common'
-
-	SHOBJ_LD='${CC}'
-
-	SHLIB_LIBVERSION='$(SHLIB_MAJOR)$(SHLIB_MINOR).$(SHLIB_LIBSUFF)'
-	SHLIB_LIBSUFF='dylib'
-
-	case "${host_os}" in
-	darwin[789]*|darwin10*)	SHOBJ_LDFLAGS=''
-			SHLIB_XLDFLAGS='-dynamiclib -arch_only `/usr/bin/arch` -install_name $(libdir)/$@ -current_version $(SHLIB_MAJOR)$(SHLIB_MINOR) -compatibility_version $(SHLIB_MAJOR) -v'
-			;;
-	*)		SHOBJ_LDFLAGS='-dynamic'
-			SHLIB_XLDFLAGS='-dynamiclib -arch_only `/usr/bin/arch` -install_name $(libdir)/$@ -current_version $(SHLIB_MAJOR)$(SHLIB_MINOR) -compatibility_version $(SHLIB_MAJOR) -v'
-			;;
-	esac
-
-	SHLIB_LIBS='-lncurses'	# see if -lcurses works on MacOS X 10.1 
-	;;
-
-openbsd*|netbsd*)
-	SHOBJ_CFLAGS=-fPIC
-	SHOBJ_LD='${CC}'
-	SHOBJ_LDFLAGS='-shared'
-
-	SHLIB_XLDFLAGS='-R$(libdir)'
-	SHLIB_LIBVERSION='$(SHLIB_LIBSUFF).$(SHLIB_MAJOR)$(SHLIB_MINOR)'
-	;;
-
-bsdi2*)
-	SHOBJ_CC=shlicc2
-	SHOBJ_CFLAGS=
-	SHOBJ_LD=ld
-	SHOBJ_LDFLAGS=-r
-	SHOBJ_LIBS=-lc_s.2.1.0
-
-	# BSD/OS 2.x and 3.x `shared libraries' are too much of a pain in
-	# the ass -- they require changing {/usr/lib,etc}/shlib.map on
-	# each system, and the library creation process is byzantine
-	SHLIB_STATUS=unsupported
-	;;
-
-bsdi3*)
-	SHOBJ_CC=shlicc2
-	SHOBJ_CFLAGS=
-	SHOBJ_LD=ld
-	SHOBJ_LDFLAGS=-r
-	SHOBJ_LIBS=-lc_s.3.0.0
-
-	# BSD/OS 2.x and 3.x `shared libraries' are too much of a pain in
-	# the ass -- they require changing {/usr/lib,etc}/shlib.map on
-	# each system, and the library creation process is byzantine
-	SHLIB_STATUS=unsupported
-	;;
-
-bsdi4*)
-	# BSD/OS 4.x now supports ELF and SunOS-style dynamically-linked
-	# shared libraries.  gcc 2.x is the standard compiler, and the
-	# `normal' gcc options should work as they do in Linux.
-
-	SHOBJ_CFLAGS=-fPIC
-	SHOBJ_LD='${CC}'
-	SHOBJ_LDFLAGS='-shared -Wl,-soname,$@'
-
-	SHLIB_XLDFLAGS='-Wl,-soname,`basename $@ $(SHLIB_MINOR)`'
-	SHLIB_LIBVERSION='$(SHLIB_LIBSUFF).$(SHLIB_MAJOR)$(SHLIB_MINOR)'
-	;;
-
-osf*-*gcc*)
-	# Fix to use gcc linker driver from bfischer@TechFak.Uni-Bielefeld.DE
-	SHOBJ_LD='${CC}'
-	SHOBJ_LDFLAGS='-shared -Wl,-soname,$@'
-
-	SHLIB_XLDFLAGS='-rpath $(libdir)'
-	SHLIB_LIBVERSION='$(SHLIB_LIBSUFF).$(SHLIB_MAJOR)'
-	;;
-
-osf*)
-	SHOBJ_LD=ld
-	SHOBJ_LDFLAGS='-shared -soname $@ -expect_unresolved "*"'
-
-	SHLIB_XLDFLAGS='-rpath $(libdir)'
-	SHLIB_LIBVERSION='$(SHLIB_LIBSUFF).$(SHLIB_MAJOR)'
-	;;
-
-aix4.[2-9]*-*gcc*|aix[5-9].*-*gcc*)		# lightly tested by jik@cisco.com
-	SHOBJ_CFLAGS=-fpic
-	SHOBJ_LD='ld'
-	SHOBJ_LDFLAGS='-bdynamic -bnoentry -bexpall'
-	SHOBJ_XLDFLAGS='-G'
-
-	SHLIB_XLDFLAGS='-bM:SRE'
-	SHLIB_LIBS='-lcurses -lc'
-	SHLIB_LIBVERSION='$(SHLIB_LIBSUFF).$(SHLIB_MAJOR)'
-	;;
-
-aix4.[2-9]*|aix[5-9].*)
-	SHOBJ_CFLAGS=-K
-	SHOBJ_LD='ld'
-	SHOBJ_LDFLAGS='-bdynamic -bnoentry -bexpall'
-	SHOBJ_XLDFLAGS='-G'
-
-	SHLIB_XLDFLAGS='-bM:SRE'
-	SHLIB_LIBS='-lcurses -lc'
-	SHLIB_LIBVERSION='$(SHLIB_LIBSUFF).$(SHLIB_MAJOR)'
-	;;
-
-#
-# THE FOLLOWING ARE UNTESTED -- and some may not support the dlopen interface
-#
-irix[56]*-*gcc*)
-	SHOBJ_CFLAGS='-fpic'
-	SHOBJ_LD='${CC}'
-	SHOBJ_LDFLAGS='-shared -Wl,-soname,$@'
-
-	SHLIB_XLDFLAGS='-Wl,-rpath,$(libdir)'
-	SHLIB_LIBVERSION='$(SHLIB_LIBSUFF).$(SHLIB_MAJOR)'
-	;;
-
-irix[56]*)
-	SHOBJ_CFLAGS='-K PIC'
-	SHOBJ_LD=ld
-#	SHOBJ_LDFLAGS='-call_shared -hidden_symbol -no_unresolved -soname $@'
-#	Change from David Kaelbling <drk@sgi.com>.  If you have problems,
-#	remove the `-no_unresolved'
-	SHOBJ_LDFLAGS='-shared -no_unresolved -soname $@'
-
-	SHLIB_XLDFLAGS='-rpath $(libdir)'
-	SHLIB_LIBVERSION='$(SHLIB_LIBSUFF).$(SHLIB_MAJOR)'
-	;;
-
-hpux9*-*gcc*)
-	# must use gcc; the bundled cc cannot compile PIC code
-	SHOBJ_CFLAGS='-fpic'
-	SHOBJ_LD='${CC}'
-	SHOBJ_LDFLAGS='-shared -Wl,-b -Wl,+s'
-
-	SHLIB_XLDFLAGS='-Wl,+b,$(libdir)'
-	SHLIB_LIBSUFF='sl'
-	SHLIB_LIBVERSION='$(SHLIB_LIBSUFF).$(SHLIB_MAJOR)'
-	;;
-
-hpux9*)
-	SHOBJ_STATUS=unsupported
-	SHLIB_STATUS=unsupported
-
-	# If you are using the HP ANSI C compiler, you can uncomment and use
-	# this code (I have not tested it)
-#	SHOBJ_STATUS=supported
-#	SHLIB_STATUS=supported
-#
-#	SHOBJ_CFLAGS='+z'
-#	SHOBJ_LD='ld'
-#	SHOBJ_LDFLAGS='-b +s'
-#
-#	SHLIB_XLDFLAGS='+b $(libdir)'
-#	SHLIB_LIBSUFF='sl'
-#	SHLIB_LIBVERSION='$(SHLIB_LIBSUFF).$(SHLIB_MAJOR)'	
-
-	;;
-
-hpux10*-*gcc*)
-	# must use gcc; the bundled cc cannot compile PIC code
-	SHOBJ_CFLAGS='-fpic'
-	SHOBJ_LD='${CC}'
-	# if you have problems linking here, moving the `-Wl,+h,$@' from
-	# SHLIB_XLDFLAGS to SHOBJ_LDFLAGS has been reported to work
-	SHOBJ_LDFLAGS='-shared -fpic -Wl,-b -Wl,+s'
-
-	SHLIB_XLDFLAGS='-Wl,+h,$@ -Wl,+b,$(libdir)'
-	SHLIB_LIBSUFF='sl'
-	SHLIB_LIBVERSION='$(SHLIB_LIBSUFF).$(SHLIB_MAJOR)'
-	;;
-
-hpux10*)
-	SHOBJ_STATUS=unsupported
-	SHLIB_STATUS=unsupported
-
-	# If you are using the HP ANSI C compiler, you can uncomment and use
-	# this code (I have not tested it)
-#	SHOBJ_STATUS=supported
-#	SHLIB_STATUS=supported
-#
-#	SHOBJ_CFLAGS='+z'
-#	SHOBJ_LD='ld'
-#	SHOBJ_LDFLAGS='-b +s +h $@'
-#
-#	SHLIB_XLDFLAGS='+b $(libdir)'
-#	SHLIB_LIBSUFF='sl'
-#	SHLIB_LIBVERSION='$(SHLIB_LIBSUFF).$(SHLIB_MAJOR)'	
-
-	;;
-
-hpux11*-*gcc*)
-	# must use gcc; the bundled cc cannot compile PIC code
-	SHOBJ_CFLAGS='-fpic'
-	SHOBJ_LD='${CC}'
-#	SHOBJ_LDFLAGS='-shared -Wl,-b -Wl,-B,symbolic -Wl,+s -Wl,+std -Wl,+h,$@'
-	SHOBJ_LDFLAGS='-shared -fpic -Wl,-b -Wl,+s -Wl,+h,$@'
-
-	SHLIB_XLDFLAGS='-Wl,+b,$(libdir)'
-	SHLIB_LIBSUFF='sl'
-	SHLIB_LIBVERSION='$(SHLIB_LIBSUFF).$(SHLIB_MAJOR)'
-	;;
-
-hpux11*)
-	SHOBJ_STATUS=unsupported
-	SHLIB_STATUS=unsupported
-
-	# If you are using the HP ANSI C compiler, you can uncomment and use
-	# this code (I have not tested it)
-#	SHOBJ_STATUS=supported
-#	SHLIB_STATUS=supported
-#
-#	SHOBJ_CFLAGS='+z'
-#	SHOBJ_LD='ld'
-#	SHOBJ_LDFLAGS='-b +s +h $@'
-#
-#	SHLIB_XLDFLAGS='+b $(libdir)'
-#	SHLIB_LIBSUFF='sl'
-#	SHLIB_LIBVERSION='$(SHLIB_LIBSUFF).$(SHLIB_MAJOR)'	
-
-	;;
-
-sysv4*-*gcc*)
-	SHOBJ_CFLAGS=-shared
-	SHOBJ_LDFLAGS='-shared -h $@'
-	SHOBJ_LD='${CC}'
-
-	SHLIB_LIBVERSION='$(SHLIB_LIBSUFF).$(SHLIB_MAJOR)'
-	;;
-
-sysv4*)
-	SHOBJ_CFLAGS='-K PIC'
-	SHOBJ_LD=ld
-	SHOBJ_LDFLAGS='-dy -z text -G -h $@'
-
-	SHLIB_LIBVERSION='$(SHLIB_LIBSUFF).$(SHLIB_MAJOR)'
-	;;
-
-sco3.2v5*-*gcc*)
-	SHOBJ_CFLAGS='-fpic'		# DEFAULTS TO ELF
-	SHOBJ_LD='${CC}'
-	SHOBJ_LDFLAGS='-shared'
-
-	SHLIB_LIBVERSION='$(SHLIB_LIBSUFF).$(SHLIB_MAJOR)'
-	;;
-
-sco3.2v5*)
-	SHOBJ_CFLAGS='-K pic -b elf'
-	SHOBJ_LD=ld
-	SHOBJ_LDFLAGS='-G -b elf -dy -z text -h $@'
-
-	SHLIB_LIBVERSION='$(SHLIB_LIBSUFF).$(SHLIB_MAJOR)'
-	;;
-
-sysv5uw7*-*gcc*)
-	SHOBJ_CFLAGS='-fpic'
-	SHOBJ_LD='${CC}'
-	SHOBJ_LDFLAGS='-shared'
-
-	SHLIB_LIBVERSION='$(SHLIB_LIBSUFF).$(SHLIB_MAJOR)'
-	;;
-
-sysv5uw7*)
-	SHOBJ_CFLAGS='-K PIC'
-	SHOBJ_LD=ld
-	SHOBJ_LDFLAGS='-G -dy -z text -h $@'
-
-	SHLIB_LIBVERSION='$(SHLIB_LIBSUFF).$(SHLIB_MAJOR)'
-	;;
-
-sysv5UnixWare*-*gcc*)
-	SHOBJ_CFLAGS=-fpic
-	SHOBJ_LD='${CC}'
-	SHOBJ_LDFLAGS='-shared'
-
-	SHLIB_LIBVERSION='$(SHLIB_LIBSUFF).$(SHLIB_MAJOR)'
-	;;
-
-sysv5UnixWare*)
-	SHOBJ_CFLAGS='-K PIC'
-	SHOBJ_LD=ld
-	SHOBJ_LDFLAGS='-G -dy -z text -h $@'
-
-	SHLIB_LIBVERSION='$(SHLIB_LIBSUFF).$(SHLIB_MAJOR)'
-	;;
-
-sysv5OpenUNIX*-*gcc*)
-	SHOBJ_CFLAGS=-fpic
-	SHOBJ_LD='${CC}'
-	SHOBJ_LDFLAGS='-shared'
-
-	SHLIB_LIBVERSION='$(SHLIB_LIBSUFF).$(SHLIB_MAJOR)'
-	;;
-
-sysv5OpenUNIX*)
-	SHOBJ_CFLAGS='-K PIC'
-	SHOBJ_LD=ld
-	SHOBJ_LDFLAGS='-G -dy -z text -h $@'
-
-	SHLIB_LIBVERSION='$(SHLIB_LIBSUFF).$(SHLIB_MAJOR)'
-	;;
-
-dgux*-*gcc*)
-	SHOBJ_CFLAGS=-fpic
-	SHOBJ_LD='${CC}'
-	SHOBJ_LDFLAGS='-shared'
-
-	SHLIB_LIBVERSION='$(SHLIB_LIBSUFF).$(SHLIB_MAJOR)'
-	;;
-
-dgux*)
-	SHOBJ_CFLAGS='-K pic'
-	SHOBJ_LD=ld
-	SHOBJ_LDFLAGS='-G -dy -h $@'
-
-	SHLIB_LIBVERSION='$(SHLIB_LIBSUFF).$(SHLIB_MAJOR)'
-	;;
-
-msdos*)
-	SHOBJ_STATUS=unsupported
-	SHLIB_STATUS=unsupported
-	;;
-
-cygwin*)
-	SHOBJ_LD='$(CC)'
-	SHOBJ_LDFLAGS='-shared -Wl,--enable-auto-import -Wl,--enable-auto-image-base -Wl,--export-all -Wl,--out-implib=$(@).a'
-	SHLIB_LIBPREF='cyg'
-	SHLIB_LIBSUFF='dll'
-	SHLIB_LIBVERSION='$(SHLIB_DLLVERSION).$(SHLIB_LIBSUFF)'
-	SHLIB_LIBS='$(TERMCAP_LIB)'
-
-	SHLIB_DOT=
-	# For official cygwin releases, DLLVERSION will be defined in the
-	# environment of configure, and will be incremented any time the API
-	# changes in a non-backwards compatible manner.  Otherwise, it is just
-	# SHLIB_MAJOR.
-	if [ -n "$DLLVERSION" ] ; then
-		SHLIB_DLLVERSION="$DLLVERSION"
-	fi
-	;;
-
-mingw*)
-	SHOBJ_LD='$(CC)'
-	SHOBJ_LDFLAGS='-shared -Wl,--enable-auto-import -Wl,--enable-auto-image-base -Wl,--export-all -Wl,--out-implib=$(@).a'
-	SHLIB_LIBSUFF='dll'
-	SHLIB_LIBVERSION='$(SHLIB_DLLVERSION).$(SHLIB_LIBSUFF)'
-	SHLIB_LIBS='$(TERMCAP_LIB)'
-
-	SHLIB_DOT=
-	# For official MinGW releases, DLLVERSION will be defined in the
-	# environment of configure, and will be incremented any time the API
-	# changes in a non-backwards compatible manner.  Otherwise, it is just
-	# SHLIB_MAJOR.
-	if [ -n "$DLLVERSION" ] ; then
-		SHLIB_DLLVERSION="$DLLVERSION"
-	fi
-	;;
-
-#
-# Rely on correct gcc configuration for everything else
-#
-*-*gcc*)
-	SHOBJ_CFLAGS=-fpic
-	SHOBJ_LD='${CC}'
-	SHOBJ_LDFLAGS='-shared'
-
-	SHLIB_LIBVERSION='$(SHLIB_LIBSUFF).$(SHLIB_MAJOR)'
-	;;
-
-*)
-	SHOBJ_STATUS=unsupported
-	SHLIB_STATUS=unsupported
-	;;
-
-esac
-
-echo SHOBJ_CC=\'"$SHOBJ_CC"\'
-echo SHOBJ_CFLAGS=\'"$SHOBJ_CFLAGS"\'
-echo SHOBJ_LD=\'"$SHOBJ_LD"\'
-echo SHOBJ_LDFLAGS=\'"$SHOBJ_LDFLAGS"\'
-echo SHOBJ_XLDFLAGS=\'"$SHOBJ_XLDFLAGS"\'
-echo SHOBJ_LIBS=\'"$SHOBJ_LIBS"\'
-
-echo SHLIB_XLDFLAGS=\'"$SHLIB_XLDFLAGS"\'
-echo SHLIB_LIBS=\'"$SHLIB_LIBS"\'
-
-echo SHLIB_DOT=\'"$SHLIB_DOT"\'
-
-echo SHLIB_LIBPREF=\'"$SHLIB_LIBPREF"\'
-echo SHLIB_LIBSUFF=\'"$SHLIB_LIBSUFF"\'
-
-echo SHLIB_LIBVERSION=\'"$SHLIB_LIBVERSION"\'
-echo SHLIB_DLLVERSION=\'"$SHLIB_DLLVERSION"\'
-
-echo SHOBJ_STATUS=\'"$SHOBJ_STATUS"\'
-echo SHLIB_STATUS=\'"$SHLIB_STATUS"\'
-
-exit 0
diff --git a/exsrc/src/yes.txt b/exsrc/src/yes.txt
deleted file mode 100644
index c6991e8fe8bbb05d449d2c9886dc4a9fb15b6c13..0000000000000000000000000000000000000000
--- a/exsrc/src/yes.txt
+++ /dev/null
@@ -1,2 +0,0 @@
-yes
-
diff --git a/exsrc/twisted.sh b/exsrc/twisted.sh
deleted file mode 100755
index fafb9ea76d35d6fdb7e1270f9cab3b1e8d4ae12f..0000000000000000000000000000000000000000
--- a/exsrc/twisted.sh
+++ /dev/null
@@ -1,6 +0,0 @@
-#!/bin/sh
-PACKAGE="Twisted"
-. ./prolog.sh 
-# Twisted.
-(cd Twisted-*/zope.interface*; ${prefix}/${version}/bin/python setup.py build ${D} install; cd .. ; ${prefix}/${version}/bin/python setup.py build ${D} install)
-
diff --git a/exsrc/vtk.sh b/exsrc/vtk.sh
deleted file mode 100755
index 7f15b4f5008414dc333da06c1aba48fefac2e065..0000000000000000000000000000000000000000
--- a/exsrc/vtk.sh
+++ /dev/null
@@ -1,17 +0,0 @@
-#!/bin/sh
-PACKAGE="VTK"
-. ./prolog.sh
-(  BUILD_DIR=`pwd`;\
-   cd VTK*; \
-   sed -e 's@CDAT_PREFIX@'${prefix}'/Externals@g' \
-       -e 's/PY_VERSION/2.4/g' \
-       -e 's@CDAT_BUILD_DIR@'${BUILD_DIR}'@g' \
-       -e 's/TCLTK_VERSION/8.4/g' ../../VTK_BUILD_ANSWERS.core > VTK_BUILD_ANSWERS.feed ; \
-   mkdir -p ${prefix}/Externals/VTK;\
-   cp VTK_BUILD_ANSWERS.feed ${prefix}/Externals/VTK/CMakeCache.txt ;
-   cd ${prefix}/Externals/VTK ;\
-   ${prefix}/Externals/bin/cmake CMakeCache.txt ;\
-   make; make install ; \
-   cd Wrapping/Python ; \
-   ${prefix}/${version}/bin/python setup.py install; \
-)
diff --git a/exsrc/xgks.sh b/exsrc/xgks.sh
deleted file mode 100755
index 5061fb541136aaac6435cbc2bb0bfec3ea63a7f6..0000000000000000000000000000000000000000
--- a/exsrc/xgks.sh
+++ /dev/null
@@ -1,21 +0,0 @@
-#!/bin/sh
-PACKAGE="xgks"
-OS=`uname`
-. ./prolog.sh
-# xgks
-if ( test "${OS}" = 'Darwin' ) then
-    CPP_X11="-I/usr/X11R6/include"; export CPP_X11
-fi
-LD_X11=""; export LD_X11
-FC='';export FC
-# The configure step will make a header file udposix.h that vcs needs
-cd xgks 
-./configure --prefix=${prefix}/Externals || exit 1
-echo "Installing udposix.h"
-/bin/rm -fr ${prefix}/Externals/include/udposix.h || exit 1
-/bin/cp port/misc/udposix.h ${prefix}/Externals/include/udposix.h || exit 1
-make port/all || exit 1
-make port/install || exit 1
-# added the CXX define for MacOS
-make CXX=cc fontdb/all || exit 1
-make CXX=cc fontdb/install || exit 1
diff --git a/images/2leftarrow.gif b/images/2leftarrow.gif
deleted file mode 100644
index 0f42224dad68b601ac86e3ef71cf1bb89bc7f85d..0000000000000000000000000000000000000000
Binary files a/images/2leftarrow.gif and /dev/null differ
diff --git a/images/2rightarrow.gif b/images/2rightarrow.gif
deleted file mode 100644
index befbdfad31ef374dcea72afb17a1442ca726ad88..0000000000000000000000000000000000000000
Binary files a/images/2rightarrow.gif and /dev/null differ
diff --git a/images/ASD.scr b/images/ASD.scr
deleted file mode 100755
index dfc4be3e4ebc04cc03ac9b66de0d6de0d8c39944..0000000000000000000000000000000000000000
--- a/images/ASD.scr
+++ /dev/null
@@ -1,1268 +0,0 @@
-Tt_ASD1(3,1,1,0.2,1)
-Tt_ASD2(3,1,1,0.2,1)
-To_ASD1(0.03,0,r,c,h)
-To_ASD2(0.019,0,r,c,h)
-P_ASD(
-  File(p=1,x=0.0725,y=0.02125,Tt=default,To=default),
-  Function(p=1,x=0.0725,y=0.02125,Tt=default,To=default),
-  LogicalMask(p=1,x=0.0725,y=0.03625,Tt=default,To=default),
-  Transform(p=1,x=0.0725,y=0.05125,Tt=default,To=default),
-  source(p=1,x=0.0725,y=0.70375,Tt=default,To=default),
-  name(p=1,x=0.0725,y=0.68875,Tt=default,To=default),
-  title(p=1,x=0.1675,y=0.68875,Tt=default,To=default),
-  units(p=1,x=0.6615,y=0.68875,Tt=default,To=default),
-  crdate(p=1,x=0.7375,y=0.68875,Tt=default,To=default),
-  crtime(p=1,x=0.8325,y=0.68875,Tt=default,To=default),
-  comment#1(p=1,x=0.909091,y=0.0466611,Tt=default,To=default),
-  comment#2(p=1,x=0.12,y=0.72875,Tt=default,To=default),
-  comment#3(p=1,x=0.12,y=0.74375,Tt=default,To=default),
-  comment#4(p=1,x=0.85,y=0.070,Tt=default,To=default),
-  xname(p=1,x=0.499345,y=0.17035,Tt=default,To=defcenter),
-  yname(p=1,x=0.0169,y=0.420034,Tt=default,To=defcentup),
-  zname(p=1,x=0.025,y=0.80875,Tt=default,To=default),
-  tname(p=1,x=0.025,y=0.80875,Tt=default,To=default),
-  xunits(p=0,x=0.595,y=0.22125,Tt=default,To=default),
-  yunits(p=0,x=0.044,y=0.48875,Tt=default,To=defcentup),
-  zunits(p=1,x=0.025,y=0.80875,Tt=default,To=default),
-  tunits(p=1,x=0.025,y=0.80875,Tt=default,To=default),
-  xvalue(p=1,x=0.785,y=0.70375,Th=default,Tt=default,To=default),
-  yvalue(p=1,x=0.785,y=0.68875,Th=default,Tt=default,To=default),
-  zvalue(p=1,x=0.785,y=0.67375,Th=default,Tt=default,To=default),
-  tvalue(p=1,x=0.785,y=0.65875,Th=default,Tt=default,To=default),
-  mean(p=1,x=0.0725,y=0.66875,Th=default,Tt=default,To=default),
-  max(p=1,x=0.2625,y=0.66875,Th=default,Tt=default,To=default),
-  min(p=1,x=0.4525,y=0.66875,Th=default,Tt=default,To=default),
-  xtic#1(p=1,y1=0.21125,y2=0.20175,Tl=default),
-  xtic#2(p=1,y1=0.63875,y2=0.64825,Tl=default),
-  xmintic#a(p=1,y1=0.21125,y2=0.2065,Tl=default),
-  xmintic#b(p=1,y1=0.64825,y2=0.6535,Tl=default),
-  ytic#1(p=1,x1=0.0725,x2=0.063,Tl=default),
-  ytic#2(p=1,x1=0.9275,x2=0.937,Tl=default),
-  ymintic#a(p=1,x1=0.0725,x2=0.06775,Tl=default),
-  ymintic#b(p=1,x1=0.9275,x2=0.93225,Tl=default),
-  xlabel#1(p=1,y=0.19035,Tt=default,To=defcenter),
-  xlabel#2(p=1,y=0.66152,Tt=default,To=defcenter),
-  ylabel#1(p=1,x=0.063,Tt=default,To=defright),
-  ylabel#2(p=1,x=0.937,Tt=default,To=default),
-  box#1(p=1,x1=0.0725,y1=0.21125,x2=0.9275,y2=0.63875,Tl=default),
-  box#2(p=0,x1=0.025,y1=0.23975,x2=0.899,y2=0.65775,Tl=default),
-  box#3(p=0,x1=0.025,y1=0.24925,x2=0.8895,y2=0.64825,Tl=default),
-  box#4(p=0,x1=0.025,y1=0.02125,x2=0.025,y2=0.02125,Tl=default),
-  line#1(p=0,x1=0.0725,y1=0.425,x2=0.9275,y2=0.425,Tl=default),
-  line#2(p=0,x1=0.5,y1=0.21125,x2=0.5,y2=0.63875,Tl=default),
-  line#3(p=0,x1=0.025,y1=0.78125,x2=0.88,y2=0.78125,Tl=default),
-  line#4(p=0,x1=0,y1=0.8,x2=0.9,y2=0.8,Tl=default),
-  legend(p=1,x1=0.0725,y1=0.11625,x2=0.9275,y2=0.13525,Tt=default,To=defcenter,Tl=default),
-  data(p=1,x1=0.0725,y1=0.21125,x2=0.9275,y2=0.63875)  )
-P_ASD_dud(
-  File(p=0,x=0.025,y=0.0112546,Tt=default,To=default),
-  Function(p=0,x=0.025,y=0.0112546,Tt=default,To=default),
-  LogicalMask(p=0,x=0.025,y=0.0112546,Tt=default,To=default),
-  Transform(p=0,x=0.025,y=0.0112546,Tt=default,To=default),
-  source(p=0,x=0.025,y=0.0112546,Tt=default,To=default),
-  name(p=0,x=0.025,y=0.0112546,Tt=default,To=default),
-  title(p=0,x=0.025,y=0.0112546,Tt=default,To=default),
-  units(p=0,x=0.025,y=0.0112546,Tt=default,To=default),
-  crdate(p=0,x=0.025,y=0.0112546,Tt=default,To=default),
-  crtime(p=0,x=0.025,y=0.0112546,Tt=default,To=default),
-  comment#1(p=0,x=0.025,y=0.0112546,Tt=default,To=default),
-  comment#2(p=0,x=0.025,y=0.0112546,Tt=default,To=default),
-  comment#3(p=0,x=0.025,y=0.0112546,Tt=default,To=default),
-  comment#4(p=0,x=0.025,y=0.0112546,Tt=default,To=default),
-  xname(p=0,x=0.025,y=0.0112546,Tt=default,To=defcenter),
-  yname(p=0,x=0.025,y=0.01125,Tt=default,To=defcentup),
-  zname(p=0,x=0.025,y=0.0112546,Tt=default,To=default),
-  tname(p=0,x=0.025,y=0.0112546,Tt=default,To=default),
-  xunits(p=0,x=0.025,y=0.01125,Tt=default,To=default),
-  yunits(p=0,x=0.025,y=0.01125,Tt=default,To=defcentup),
-  zunits(p=0,x=0.025,y=0.0112546,Tt=default,To=default),
-  tunits(p=0,x=0.025,y=0.0112546,Tt=default,To=default),
-  xvalue(p=0,x=0.025,y=0.0112546,Th=default,Tt=default,To=default),
-  yvalue(p=0,x=0.025,y=0.0112546,Th=default,Tt=default,To=default),
-  zvalue(p=0,x=0.025,y=0.0112546,Th=default,Tt=default,To=default),
-  tvalue(p=0,x=0.025,y=0.0112546,Th=default,Tt=default,To=default),
-  mean(p=0,x=0.025,y=0.0112546,Th=default,Tt=default,To=default),
-  max(p=0,x=0.025,y=0.0112546,Th=default,Tt=default,To=default),
-  min(p=0,x=0.025,y=0.0112546,Th=default,Tt=default,To=default),
-  xtic#1(p=0,y1=0.0212495,y2=0.0212495,Tl=default),
-  xtic#2(p=0,y1=0.0212495,y2=0.0212495,Tl=default),
-  xmintic#a(p=0,y1=0.0212495,y2=0.0212495,Tl=default),
-  xmintic#b(p=0,y1=0.0212495,y2=0.0212495,Tl=default),
-  ytic#1(p=0,x1=0.025,x2=0.025,Tl=default),
-  ytic#2(p=0,x1=0.025,x2=0.025,Tl=default),
-  ymintic#a(p=0,x1=0.025,x2=0.025,Tl=default),
-  ymintic#b(p=0,x1=0.025,x2=0.025,Tl=default),
-  xlabel#1(p=0,y=0.0212495,Tt=default,To=defcenter),
-  xlabel#2(p=0,y=0.02125,Tt=default,To=defcenter),
-  ylabel#1(p=0,x=0.025,Tt=default,To=defright),
-  ylabel#2(p=0,x=0.025,Tt=default,To=default),
-  box#1(p=0,x1=0.025,y1=0.0212495,x2=0.025,y2=0.0212495,Tl=default),
-  box#2(p=0,x1=0.025,y1=0.02125,x2=0.025,y2=0.02125,Tl=default),
-  box#3(p=0,x1=0.025,y1=0.02125,x2=0.025,y2=0.02125,Tl=default),
-  box#4(p=0,x1=0.025,y1=0.02125,x2=0.025,y2=0.02125,Tl=default),
-  line#1(p=0,x1=0.025,y1=0.0212495,x2=0.025,y2=0.0212495,Tl=default),
-  line#2(p=0,x1=0.025,y1=0.0212495,x2=0.025,y2=0.0212495,Tl=default),
-  line#3(p=0,x1=0.025,y1=0.02125,x2=0.025,y2=0.02125,Tl=default),
-  line#4(p=0,x1=0,y1=0,x2=0,y2=0,Tl=default),
-  legend(p=0,x1=0.025,y1=0.0212495,x2=0.025,y2=0.0212495,Tt=default,To=defcenter,Tl=default),
-  data(p=1,x1=0.0725,y1=0.21125,x2=0.9275,y2=0.63875)  )
-P_ASD1(
-  File(p=0,x=0.0669935,y=0.0152291,Tt=default,To=default),
-  Function(p=0,x=0.0669935,y=0.0152291,Tt=default,To=default),
-  LogicalMask(p=1,x=0.0780229,y=0.00653595,Tt=default,To=default),
-  Transform(p=1,x=0.0780229,y=0.0163235,Tt=default,To=default),
-  source(p=0,x=0.0669935,y=0.717229,Tt=default,To=default),
-  name(p=0,x=0.0669935,y=0.705229,Tt=default,To=default),
-  title(p=1,x=0.348809,y=0.705235,Tt=ASD1,To=ASD1),
-  units(p=0,x=0.686993,y=0.705229,Tt=default,To=default),
-  crdate(p=0,x=0.766993,y=0.705229,Tt=default,To=default),
-  crtime(p=0,x=0.866993,y=0.705229,Tt=default,To=default),
-  comment#1(p=1,x=0.2,y=0.025,Tt=ASD2,To=ASD2),
-  comment#2(p=1,x=0.1,y=0.025,Tt=ASD2,To=ASD2),
-  comment#3(p=0,x=0.139052,y=0.711242,Tt=default,To=default),
-  comment#4(p=1,x=0.0339869,y=0.360785,Tt=default,To=defcentup),
-  xname(p=1,x=0.431373,y=0.0300658,Tt=default,To=defcenter),
-  yname(p=1,x=0.0221,y=0.327701,Tt=default,To=defcentup),
-  zname(p=1,x=0.0169935,y=0.789542,Tt=default,To=default),
-  tname(p=1,x=0.0169935,y=0.789542,Tt=default,To=default),
-  xunits(p=0,x=0.616993,y=0.215229,Tt=default,To=default),
-  yunits(p=0,x=0.0369935,y=0.505229,Tt=default,To=defcentup),
-  zunits(p=1,x=0.0169935,y=0.789542,Tt=default,To=default),
-  tunits(p=1,x=0.0169935,y=0.789542,Tt=default,To=default),
-  xvalue(p=1,x=0.993464,y=0.672091,Th=default,Tt=default,To=default),
-  yvalue(p=0,x=0.816993,y=0.695229,Th=default,Tt=default,To=default),
-  zvalue(p=0,x=0.816993,y=0.685229,Th=default,Tt=default,To=default),
-  tvalue(p=1,x=0.993464,y=0.642729,Th=default,Tt=default,To=default),
-  mean(p=0,x=0.0669935,y=0.685229,Th=default,Tt=default,To=default),
-  max(p=0,x=0.266993,y=0.685229,Th=default,Tt=default,To=default),
-  min(p=0,x=0.466993,y=0.685229,Th=default,Tt=default,To=default),
-  xtic#1(p=1,y1=0.0640523,y2=0.0724123,Tl=default),
-  xtic#2(p=1,y1=0.624837,y2=0.616477,Tl=default),
-  xmintic#a(p=0,y1=0.0640523, y2=0.067996695,Tl=default),
-  xmintic#b(p=0,y1=0.620657,y2=0.624837,Tl=default),
-  ytic#1(p=1,x1=0.1071242,x2=0.115306,Tl=default),
-  ytic#2(p=1,x1=0.819543,x2=0.811361,Tl=default),
-  ymintic#a(p=0,x1=0.1071242,x2=0.1112151,Tl=default),
-  ymintic#b(p=0,x1=0.819543,x2=0.815452,Tl=default),
-  xlabel#1(p=1,y=0.0522873,Tt=default,To=defcenter),
-  xlabel#2(p=0,y=0.64152,Tt=default,To=defcenter),
-  ylabel#1(p=1,x=0.0979738,Tt=default,To=defright),
-  ylabel#2(p=0,x=0.827,Tt=default,To=default),
-  box#1(p=1,x1=0.1071242,y1=0.0640523,x2=0.819543,y2=0.624837,Tl=default),
-  box#2(p=0,x1=0.0169935,y1=0.235229,x2=0.936993,y2=0.675229,Tl=default),
-  box#3(p=0,x1=0.0169935,y1=0.245229,x2=0.926993,y2=0.665229,Tl=default),
-  box#4(p=0,x1=0.0169935,y1=0.00522876,x2=0.0169935,y2=0.00522876,Tl=default),
-  line#1(p=0,x1=0.0669935,y1=0.430229,x2=0.966993,y2=0.430229,Tl=default),
-  line#2(p=0,x1=0.516993,y1=0.205229,x2=0.516993,y2=0.655229,Tl=default),
-  line#3(p=0,x1=0.0169935,y1=0.405229,x2=0.916993,y2=0.405229,Tl=default),
-  line#4(p=0,x1=0.0169935,y1=0.805229,x2=0.916993,y2=0.805229,Tl=default),
-  legend(p=1,x1=0.863636,y1=0.617701,x2=0.909091,y2=0.617701,Tt=std,To=left,Tl=default),
-  data(p=1,x1=0.1071242,y1=0.0640523,x2=0.819543,y2=0.624837)  )
-Tt_std(1,1,1,0.2,241)
-To_left(0.01,0,r,l,h)
-P_ASD2(
-  File(p=0,x=0.05,y=0.0100003,Tt=default,To=default),
-  Function(p=0,x=0.05,y=0.0100003,Tt=default,To=default),
-  LogicalMask(p=1,x=0.05,y=0.02,Tt=default,To=default),
-  Transform(p=1,x=0.05,y=0.03,Tt=default,To=default),
-  source(p=0,x=0.05,y=0.712,Tt=default,To=default),
-  name(p=0,x=0.05,y=0.7,Tt=default,To=default),
-  title(p=0,x=0.15,y=0.7,Tt=default,To=default),
-  units(p=0,x=0.67,y=0.7,Tt=default,To=default),
-  crdate(p=0,x=0.75,y=0.7,Tt=default,To=default),
-  crtime(p=0,x=0.85,y=0.7,Tt=default,To=default),
-  comment#1(p=0,x=0.1,y=0.72,Tt=default,To=default),
-  comment#2(p=0,x=0.1,y=0.73,Tt=default,To=default),
-  comment#3(p=0,x=0.1,y=0.74,Tt=default,To=default),
-  comment#4(p=1,x=0.1,y=0.75,Tt=default,To=default),
-  xname(p=0,x=0.3,y=0.15,Tt=default,To=defcenter),
-  yname(p=0,x=0.02,y=0.4,Tt=default,To=defcentup),
-  zname(p=1,x=0,y=0.82,Tt=default,To=default),
-  tname(p=1,x=0,y=0.82,Tt=default,To=default),
-  xunits(p=0,x=0.3,y=0.15,Tt=default,To=default),
-  yunits(p=0,x=0.02,y=0.5,Tt=default,To=defcentup),
-  zunits(p=1,x=0,y=0.82,Tt=default,To=default),
-  tunits(p=1,x=0,y=0.82,Tt=default,To=default),
-  xvalue(p=1,x=0.8,y=0.7,Th=default,Tt=default,To=default),
-  yvalue(p=0,x=0.8,y=0.69,Th=default,Tt=default,To=default),
-  zvalue(p=0,x=0.8,y=0.68,Th=default,Tt=default,To=default),
-  tvalue(p=1,x=0.8,y=0.67,Th=default,Tt=default,To=default),
-  mean(p=0,x=0.05,y=0.68,Th=default,Tt=default,To=default),
-  max(p=0,x=0.25,y=0.68,Th=default,Tt=default,To=default),
-  min(p=0,x=0.45,y=0.68,Th=default,Tt=default,To=default),
-  xtic#1(p=0,y1=0.2,y2=0.19,Tl=default),
-  xtic#2(p=0,y1=0.65,y2=0.66,Tl=default),
-  xmintic#a(p=0,y1=0.2,y2=0.195,Tl=default),
-  xmintic#b(p=0,y1=0.65,y2=0.655,Tl=default),
-  ytic#1(p=0,x1=0.05,x2=0.04,Tl=default),
-  ytic#2(p=0,x1=0.55,x2=0.56,Tl=default),
-  ymintic#a(p=0,x1=0.05,x2=0.045,Tl=default),
-  ymintic#b(p=0,x1=0.55,x2=0.555,Tl=default),
-  xlabel#1(p=0,y=0.18,Tt=default,To=defcenter),
-  xlabel#2(p=0,y=0,Tt=default,To=defcenter),
-  ylabel#1(p=0,x=0.035,Tt=default,To=defright),
-  ylabel#2(p=0,x=0,Tt=default,To=default),
-  box#1(p=0,x1=0.05,y1=0.2,x2=0.55,y2=0.65,Tl=default),
-  box#2(p=0,x1=0,y1=0.23,x2=0.92,y2=0.67,Tl=default),
-  box#3(p=0,x1=0,y1=0.24,x2=0.91,y2=0.66,Tl=default),
-  box#4(p=0,x1=0,y1=0,x2=0,y2=0,Tl=default),
-  line#1(p=0,x1=0.05,y1=0.425,x2=0.95,y2=0.425,Tl=default),
-  line#2(p=0,x1=0.5,y1=0.2,x2=0.5,y2=0.65,Tl=default),
-  line#3(p=0,x1=0,y1=0.4,x2=0.9,y2=0.4,Tl=default),
-  line#4(p=0,x1=0,y1=0.8,x2=0.9,y2=0.8,Tl=default),
-  legend(p=1,x1=0.863636,y1=0.599123,x2=0.909091,y2=0.599123,Tt=std,To=left,Tl=default),
-  data(p=1,x1=0.1071242,y1=0.0640523,x2=0.819543,y2=0.624837)  )
-Tt_std(1,1,1,0.2,241)
-To_left(0.01,0,r,l,h)
-P_ASD3(
-  File(p=0,x=0.05,y=0.0100003,Tt=default,To=default),
-  Function(p=0,x=0.05,y=0.0100003,Tt=default,To=default),
-  LogicalMask(p=1,x=0.05,y=0.02,Tt=default,To=default),
-  Transform(p=1,x=0.05,y=0.03,Tt=default,To=default),
-  source(p=0,x=0.05,y=0.712,Tt=default,To=default),
-  name(p=0,x=0.05,y=0.7,Tt=default,To=default),
-  title(p=0,x=0.15,y=0.7,Tt=default,To=default),
-  units(p=0,x=0.67,y=0.7,Tt=default,To=default),
-  crdate(p=0,x=0.75,y=0.7,Tt=default,To=default),
-  crtime(p=0,x=0.85,y=0.7,Tt=default,To=default),
-  comment#1(p=1,x=0.5,y=0.726797,Tt=ASD1,To=ASD1),
-  comment#2(p=1,x=0.5,y=0.691504,Tt=ASD2,To=ASD2),
-  comment#3(p=0,x=0.1,y=0.74,Tt=default,To=default),
-  comment#4(p=1,x=0.0104575,y=0.360785,Tt=default,To=defcentup),
-  xname(p=1,x=0.431373,y=0.0300658,Tt=default,To=defcenter),
-  yname(p=0,x=0.02,y=0.4,Tt=default,To=defcentup),
-  zname(p=1,x=0,y=0.82,Tt=default,To=default),
-  tname(p=1,x=0,y=0.82,Tt=default,To=default),
-  xunits(p=0,x=0.6,y=0.21,Tt=default,To=default),
-  yunits(p=0,x=0.02,y=0.5,Tt=default,To=defcentup),
-  zunits(p=1,x=0,y=0.82,Tt=default,To=default),
-  tunits(p=1,x=0,y=0.82,Tt=default,To=default),
-  xvalue(p=1,x=0.8,y=0.7,Th=default,Tt=default,To=default),
-  yvalue(p=0,x=0.8,y=0.69,Th=default,Tt=default,To=default),
-  zvalue(p=0,x=0.8,y=0.68,Th=default,Tt=default,To=default),
-  tvalue(p=1,x=0.8,y=0.67,Th=default,Tt=default,To=default),
-  mean(p=0,x=0.05,y=0.68,Th=default,Tt=default,To=default),
-  max(p=0,x=0.25,y=0.68,Th=default,Tt=default,To=default),
-  min(p=0,x=0.45,y=0.68,Th=default,Tt=default,To=default),
-  xtic#1(p=0,y1=0.0640523,y2=0.0724123,Tl=default),
-  xtic#2(p=0,y1=0.624837,y2=0.616477,Tl=default),
-  xmintic#a(p=1,y1=0.2,y2=0.195,Tl=default),
-  xmintic#b(p=1,y1=0.65,y2=0.655,Tl=default),
-  ytic#1(p=0,x1=0.1071242,x2=0.085306,Tl=default),
-  ytic#2(p=0,x1=0.819543,x2=0.781361,Tl=default),
-  ymintic#a(p=0,x1=0.05,x2=0.045,Tl=default),
-  ymintic#b(p=0,x1=0.55,x2=0.555,Tl=default),
-  xlabel#1(p=0,y=0.0522873,Tt=default,To=defcenter),
-  xlabel#2(p=0,y=0,Tt=default,To=defcenter),
-  ylabel#1(p=0,x=0.0679738,Tt=default,To=defright),
-  ylabel#2(p=0,x=0,Tt=default,To=default),
-  box#1(p=0,x1=0.1071242,y1=0.0640523,x2=0.819543,y2=0.624837,Tl=default),
-  box#2(p=0,x1=0,y1=0.23,x2=0.92,y2=0.67,Tl=default),
-  box#3(p=0,x1=0,y1=0.24,x2=0.91,y2=0.66,Tl=default),
-  box#4(p=0,x1=0,y1=0,x2=0,y2=0,Tl=default),
-  line#1(p=0,x1=0.05,y1=0.425,x2=0.95,y2=0.425,Tl=default),
-  line#2(p=0,x1=0.5,y1=0.2,x2=0.5,y2=0.65,Tl=default),
-  line#3(p=0,x1=0,y1=0.4,x2=0.9,y2=0.4,Tl=default),
-  line#4(p=0,x1=0,y1=0.8,x2=0.9,y2=0.8,Tl=default),
-  legend(p=1,x1=0.863636,y1=0.580546,x2=0.909091,y2=0.580546,Tt=std,To=left,Tl=default),
-  data(p=1,x1=0.1071242,y1=0.0640523,x2=0.819543,y2=0.624837)  )
-Tt_std(1,1,1,0.2,241)
-To_left(0.01,0,r,l,h)
-P_ASD4(
-  File(p=0,x=0.05,y=0.0100003,Tt=default,To=default),
-  Function(p=0,x=0.05,y=0.0100003,Tt=default,To=default),
-  LogicalMask(p=1,x=0.05,y=0.02,Tt=default,To=default),
-  Transform(p=1,x=0.05,y=0.03,Tt=default,To=default),
-  source(p=0,x=0.05,y=0.712,Tt=default,To=default),
-  name(p=0,x=0.05,y=0.7,Tt=default,To=default),
-  title(p=0,x=0.15,y=0.7,Tt=default,To=default),
-  units(p=0,x=0.67,y=0.7,Tt=default,To=default),
-  crdate(p=0,x=0.75,y=0.7,Tt=default,To=default),
-  crtime(p=0,x=0.85,y=0.7,Tt=default,To=default),
-  comment#1(p=1,x=0.5,y=0.726797,Tt=ASD1,To=ASD1),
-  comment#2(p=1,x=0.5,y=0.691504,Tt=ASD1,To=ASD1),
-  comment#3(p=0,x=0.1,y=0.74,Tt=default,To=default),
-  comment#4(p=1,x=0.0104575,y=0.360785,Tt=default,To=defcentup),
-  xname(p=1,x=0.431373,y=0.0300658,Tt=default,To=defcenter),
-  yname(p=0,x=0.02,y=0.4,Tt=default,To=defcentup),
-  zname(p=1,x=0,y=0.82,Tt=default,To=default),
-  tname(p=1,x=0,y=0.82,Tt=default,To=default),
-  xunits(p=0,x=0.6,y=0.21,Tt=default,To=default),
-  yunits(p=0,x=0.02,y=0.5,Tt=default,To=defcentup),
-  zunits(p=1,x=0,y=0.82,Tt=default,To=default),
-  tunits(p=1,x=0,y=0.82,Tt=default,To=default),
-  xvalue(p=1,x=0.8,y=0.7,Th=default,Tt=default,To=default),
-  yvalue(p=0,x=0.8,y=0.69,Th=default,Tt=default,To=default),
-  zvalue(p=0,x=0.8,y=0.68,Th=default,Tt=default,To=default),
-  tvalue(p=1,x=0.8,y=0.67,Th=default,Tt=default,To=default),
-  mean(p=0,x=0.05,y=0.68,Th=default,Tt=default,To=default),
-  max(p=0,x=0.25,y=0.68,Th=default,Tt=default,To=default),
-  min(p=0,x=0.45,y=0.68,Th=default,Tt=default,To=default),
-  xtic#1(p=0,y1=0.0640523,y2=0.0724123,Tl=default),
-  xtic#2(p=0,y1=0.624837,y2=0.616477,Tl=default),
-  xmintic#a(p=0,y1=0.2,y2=0.195,Tl=default),
-  xmintic#b(p=0,y1=0.65,y2=0.655,Tl=default),
-  ytic#1(p=0,x1=0.1071242,x2=0.085306,Tl=default),
-  ytic#2(p=0,x1=0.819543,x2=0.781361,Tl=default),
-  ymintic#a(p=0,x1=0.05,x2=0.045,Tl=default),
-  ymintic#b(p=0,x1=0.55,x2=0.555,Tl=default),
-  xlabel#1(p=0,y=0.0522873,Tt=default,To=defcenter),
-  xlabel#2(p=0,y=0,Tt=default,To=defcenter),
-  ylabel#1(p=0,x=0.0679738,Tt=default,To=defright),
-  ylabel#2(p=0,x=0,Tt=default,To=default),
-  box#1(p=0,x1=0.1071242,y1=0.0640523,x2=0.819543,y2=0.624837,Tl=default),
-  box#2(p=0,x1=0,y1=0.23,x2=0.92,y2=0.67,Tl=default),
-  box#3(p=0,x1=0,y1=0.24,x2=0.91,y2=0.66,Tl=default),
-  box#4(p=0,x1=0,y1=0,x2=0,y2=0,Tl=default),
-  line#1(p=0,x1=0.05,y1=0.425,x2=0.95,y2=0.425,Tl=default),
-  line#2(p=0,x1=0.5,y1=0.2,x2=0.5,y2=0.65,Tl=default),
-  line#3(p=0,x1=0,y1=0.4,x2=0.9,y2=0.4,Tl=default),
-  line#4(p=0,x1=0,y1=0.8,x2=0.9,y2=0.8,Tl=default),
-  legend(p=1,x1=0.863636,y1=0.557324,x2=0.909091,y2=0.557324,Tt=std,To=left,Tl=default),
-  data(p=1,x1=0.1071242,y1=0.0640523,x2=0.819543,y2=0.624837)  )
-Tt_std(1,1,1,0.2,241)
-To_left(0.01,0,r,l,h)
-P_ASD5(
-  File(p=0,x=0.05,y=0.0100003,Tt=default,To=default),
-  Function(p=0,x=0.05,y=0.0100003,Tt=default,To=default),
-  LogicalMask(p=1,x=0.05,y=0.02,Tt=default,To=default),
-  Transform(p=1,x=0.05,y=0.03,Tt=default,To=default),
-  source(p=0,x=0.05,y=0.712,Tt=default,To=default),
-  name(p=0,x=0.05,y=0.7,Tt=default,To=default),
-  title(p=0,x=0.15,y=0.7,Tt=default,To=default),
-  units(p=0,x=0.67,y=0.7,Tt=default,To=default),
-  crdate(p=0,x=0.75,y=0.7,Tt=default,To=default),
-  crtime(p=0,x=0.85,y=0.7,Tt=default,To=default),
-  comment#1(p=0,x=0.1,y=0.72,Tt=default,To=default),
-  comment#2(p=0,x=0.1,y=0.73,Tt=default,To=default),
-  comment#3(p=0,x=0.1,y=0.74,Tt=default,To=default),
-  comment#4(p=1,x=0.1,y=0.75,Tt=default,To=default),
-  xname(p=0,x=0.5,y=0.21,Tt=default,To=defcenter),
-  yname(p=0,x=0.02,y=0.4,Tt=default,To=defcentup),
-  zname(p=1,x=0,y=0.82,Tt=default,To=default),
-  tname(p=1,x=0,y=0.82,Tt=default,To=default),
-  xunits(p=0,x=0.6,y=0.21,Tt=default,To=default),
-  yunits(p=0,x=0.02,y=0.5,Tt=default,To=defcentup),
-  zunits(p=1,x=0,y=0.82,Tt=default,To=default),
-  tunits(p=1,x=0,y=0.82,Tt=default,To=default),
-  xvalue(p=1,x=0.8,y=0.7,Th=default,Tt=default,To=default),
-  yvalue(p=0,x=0.8,y=0.69,Th=default,Tt=default,To=default),
-  zvalue(p=0,x=0.8,y=0.68,Th=default,Tt=default,To=default),
-  tvalue(p=1,x=0.8,y=0.67,Th=default,Tt=default,To=default),
-  mean(p=0,x=0.05,y=0.68,Th=default,Tt=default,To=default),
-  max(p=0,x=0.25,y=0.68,Th=default,Tt=default,To=default),
-  min(p=0,x=0.45,y=0.68,Th=default,Tt=default,To=default),
-  xtic#1(p=0,y1=0.2,y2=0.19,Tl=default),
-  xtic#2(p=0,y1=0.65,y2=0.66,Tl=default),
-  xmintic#a(p=0,y1=0.2,y2=0.195,Tl=default),
-  xmintic#b(p=0,y1=0.65,y2=0.655,Tl=default),
-  ytic#1(p=0,x1=0.05,x2=0.04,Tl=default),
-  ytic#2(p=0,x1=0.55,x2=0.56,Tl=default),
-  ymintic#a(p=0,x1=0.05,x2=0.045,Tl=default),
-  ymintic#b(p=0,x1=0.55,x2=0.555,Tl=default),
-  xlabel#1(p=0,y=0.18,Tt=default,To=defcenter),
-  xlabel#2(p=0,y=0,Tt=default,To=defcenter),
-  ylabel#1(p=0,x=0.035,Tt=default,To=defright),
-  ylabel#2(p=0,x=0,Tt=default,To=default),
-  box#1(p=0,x1=0.05,y1=0.2,x2=0.55,y2=0.65,Tl=default),
-  box#2(p=0,x1=0,y1=0.23,x2=0.92,y2=0.67,Tl=default),
-  box#3(p=0,x1=0,y1=0.24,x2=0.91,y2=0.66,Tl=default),
-  box#4(p=0,x1=0,y1=0,x2=0,y2=0,Tl=default),
-  line#1(p=0,x1=0.05,y1=0.425,x2=0.95,y2=0.425,Tl=default),
-  line#2(p=0,x1=0.5,y1=0.2,x2=0.5,y2=0.65,Tl=default),
-  line#3(p=0,x1=0,y1=0.4,x2=0.9,y2=0.4,Tl=default),
-  line#4(p=0,x1=0,y1=0.8,x2=0.9,y2=0.8,Tl=default),
-  legend(p=1,x1=0.863636,y1=0.538747,x2=0.909091,y2=0.538747,Tt=std,To=left,Tl=default),
-  data(p=1,x1=0.1071242,y1=0.0640523,x2=0.819543,y2=0.624837)  )
-Tt_std(1,1,1,0.2,241)
-To_left(0.01,0,r,l,h)
-P_ASD6(
-  File(p=0,x=0.05,y=0.0100003,Tt=default,To=default),
-  Function(p=0,x=0.05,y=0.0100003,Tt=default,To=default),
-  LogicalMask(p=1,x=0.05,y=0.02,Tt=default,To=default),
-  Transform(p=1,x=0.05,y=0.03,Tt=default,To=default),
-  source(p=0,x=0.05,y=0.712,Tt=default,To=default),
-  name(p=0,x=0.05,y=0.7,Tt=default,To=default),
-  title(p=0,x=0.15,y=0.7,Tt=default,To=default),
-  units(p=0,x=0.67,y=0.7,Tt=default,To=default),
-  crdate(p=0,x=0.75,y=0.7,Tt=default,To=default),
-  crtime(p=0,x=0.85,y=0.7,Tt=default,To=default),
-  comment#1(p=0,x=0.1,y=0.72,Tt=default,To=default),
-  comment#2(p=0,x=0.1,y=0.73,Tt=default,To=default),
-  comment#3(p=0,x=0.1,y=0.74,Tt=default,To=default),
-  comment#4(p=1,x=0.1,y=0.75,Tt=default,To=default),
-  xname(p=0,x=0.5,y=0.21,Tt=default,To=defcenter),
-  yname(p=0,x=0.02,y=0.4,Tt=default,To=defcentup),
-  zname(p=1,x=0,y=0.82,Tt=default,To=default),
-  tname(p=1,x=0,y=0.82,Tt=default,To=default),
-  xunits(p=0,x=0.6,y=0.21,Tt=default,To=default),
-  yunits(p=0,x=0.02,y=0.5,Tt=default,To=defcentup),
-  zunits(p=1,x=0,y=0.82,Tt=default,To=default),
-  tunits(p=1,x=0,y=0.82,Tt=default,To=default),
-  xvalue(p=1,x=0.8,y=0.7,Th=default,Tt=default,To=default),
-  yvalue(p=0,x=0.8,y=0.69,Th=default,Tt=default,To=default),
-  zvalue(p=0,x=0.8,y=0.68,Th=default,Tt=default,To=default),
-  tvalue(p=1,x=0.8,y=0.67,Th=default,Tt=default,To=default),
-  mean(p=0,x=0.05,y=0.68,Th=default,Tt=default,To=default),
-  max(p=0,x=0.25,y=0.68,Th=default,Tt=default,To=default),
-  min(p=0,x=0.45,y=0.68,Th=default,Tt=default,To=default),
-  xtic#1(p=0,y1=0.2,y2=0.19,Tl=default),
-  xtic#2(p=0,y1=0.65,y2=0.66,Tl=default),
-  xmintic#a(p=0,y1=0.2,y2=0.195,Tl=default),
-  xmintic#b(p=0,y1=0.65,y2=0.655,Tl=default),
-  ytic#1(p=0,x1=0.05,x2=0.04,Tl=default),
-  ytic#2(p=0,x1=0.55,x2=0.56,Tl=default),
-  ymintic#a(p=0,x1=0.05,x2=0.045,Tl=default),
-  ymintic#b(p=0,x1=0.55,x2=0.555,Tl=default),
-  xlabel#1(p=0,y=0.18,Tt=default,To=defcenter),
-  xlabel#2(p=0,y=0,Tt=default,To=defcenter),
-  ylabel#1(p=0,x=0.035,Tt=default,To=defright),
-  ylabel#2(p=0,x=0,Tt=default,To=default),
-  box#1(p=0,x1=0.05,y1=0.2,x2=0.55,y2=0.65,Tl=default),
-  box#2(p=0,x1=0,y1=0.23,x2=0.92,y2=0.67,Tl=default),
-  box#3(p=0,x1=0,y1=0.24,x2=0.91,y2=0.66,Tl=default),
-  box#4(p=0,x1=0,y1=0,x2=0,y2=0,Tl=default),
-  line#1(p=0,x1=0.05,y1=0.425,x2=0.95,y2=0.425,Tl=default),
-  line#2(p=0,x1=0.5,y1=0.2,x2=0.5,y2=0.65,Tl=default),
-  line#3(p=0,x1=0,y1=0.4,x2=0.9,y2=0.4,Tl=default),
-  line#4(p=0,x1=0,y1=0.8,x2=0.9,y2=0.8,Tl=default),
-  legend(p=1,x1=0.863636,y1=0.520169,x2=0.909091,y2=0.520169,Tt=std,To=left,Tl=default),
-  data(p=1,x1=0.1071242,y1=0.0640523,x2=0.819543,y2=0.624837)  )
-Tt_std(1,1,1,0.2,241)
-To_left(0.01,0,r,l,h)
-P_ASD7(
-  File(p=0,x=0.05,y=0.0100003,Tt=default,To=default),
-  Function(p=0,x=0.05,y=0.0100003,Tt=default,To=default),
-  LogicalMask(p=1,x=0.05,y=0.02,Tt=default,To=default),
-  Transform(p=1,x=0.05,y=0.03,Tt=default,To=default),
-  source(p=0,x=0.05,y=0.712,Tt=default,To=default),
-  name(p=0,x=0.05,y=0.7,Tt=default,To=default),
-  title(p=0,x=0.15,y=0.7,Tt=default,To=default),
-  units(p=0,x=0.67,y=0.7,Tt=default,To=default),
-  crdate(p=0,x=0.75,y=0.7,Tt=default,To=default),
-  crtime(p=0,x=0.85,y=0.7,Tt=default,To=default),
-  comment#1(p=0,x=0.1,y=0.72,Tt=default,To=default),
-  comment#2(p=0,x=0.1,y=0.73,Tt=default,To=default),
-  comment#3(p=0,x=0.1,y=0.74,Tt=default,To=default),
-  comment#4(p=1,x=0.1,y=0.75,Tt=default,To=default),
-  xname(p=0,x=0.5,y=0.21,Tt=default,To=defcenter),
-  yname(p=0,x=0.02,y=0.4,Tt=default,To=defcentup),
-  zname(p=1,x=0,y=0.82,Tt=default,To=default),
-  tname(p=1,x=0,y=0.82,Tt=default,To=default),
-  xunits(p=0,x=0.6,y=0.21,Tt=default,To=default),
-  yunits(p=0,x=0.02,y=0.5,Tt=default,To=defcentup),
-  zunits(p=1,x=0,y=0.82,Tt=default,To=default),
-  tunits(p=1,x=0,y=0.82,Tt=default,To=default),
-  xvalue(p=1,x=0.8,y=0.7,Th=default,Tt=default,To=default),
-  yvalue(p=0,x=0.8,y=0.69,Th=default,Tt=default,To=default),
-  zvalue(p=0,x=0.8,y=0.68,Th=default,Tt=default,To=default),
-  tvalue(p=1,x=0.8,y=0.67,Th=default,Tt=default,To=default),
-  mean(p=0,x=0.05,y=0.68,Th=default,Tt=default,To=default),
-  max(p=0,x=0.25,y=0.68,Th=default,Tt=default,To=default),
-  min(p=0,x=0.45,y=0.68,Th=default,Tt=default,To=default),
-  xtic#1(p=0,y1=0.2,y2=0.19,Tl=default),
-  xtic#2(p=0,y1=0.65,y2=0.66,Tl=default),
-  xmintic#a(p=0,y1=0.2,y2=0.195,Tl=default),
-  xmintic#b(p=0,y1=0.65,y2=0.655,Tl=default),
-  ytic#1(p=0,x1=0.05,x2=0.04,Tl=default),
-  ytic#2(p=0,x1=0.55,x2=0.56,Tl=default),
-  ymintic#a(p=0,x1=0.05,x2=0.045,Tl=default),
-  ymintic#b(p=0,x1=0.55,x2=0.555,Tl=default),
-  xlabel#1(p=0,y=0.18,Tt=default,To=defcenter),
-  xlabel#2(p=0,y=0,Tt=default,To=defcenter),
-  ylabel#1(p=0,x=0.035,Tt=default,To=defright),
-  ylabel#2(p=0,x=0,Tt=default,To=default),
-  box#1(p=0,x1=0.05,y1=0.2,x2=0.55,y2=0.65,Tl=default),
-  box#2(p=0,x1=0,y1=0.23,x2=0.92,y2=0.67,Tl=default),
-  box#3(p=0,x1=0,y1=0.24,x2=0.91,y2=0.66,Tl=default),
-  box#4(p=0,x1=0,y1=0,x2=0,y2=0,Tl=default),
-  line#1(p=0,x1=0.05,y1=0.425,x2=0.95,y2=0.425,Tl=default),
-  line#2(p=0,x1=0.5,y1=0.2,x2=0.5,y2=0.65,Tl=default),
-  line#3(p=0,x1=0,y1=0.4,x2=0.9,y2=0.4,Tl=default),
-  line#4(p=0,x1=0,y1=0.8,x2=0.9,y2=0.8,Tl=default),
-  legend(p=1,x1=0.863636,y1=0.501592,x2=0.909091,y2=0.501592,Tt=std,To=left,Tl=default),
-  data(p=1,x1=0.1071242,y1=0.0640523,x2=0.819543,y2=0.624837)  )
-Tt_std(1,1,1,0.2,241)
-To_left(0.01,0,r,l,h)
-P_ASD8(
-  File(p=0,x=0.05,y=0.0100003,Tt=default,To=default),
-  Function(p=0,x=0.05,y=0.0100003,Tt=default,To=default),
-  LogicalMask(p=1,x=0.05,y=0.02,Tt=default,To=default),
-  Transform(p=1,x=0.05,y=0.03,Tt=default,To=default),
-  source(p=0,x=0.05,y=0.712,Tt=default,To=default),
-  name(p=0,x=0.05,y=0.7,Tt=default,To=default),
-  title(p=0,x=0.15,y=0.7,Tt=default,To=default),
-  units(p=0,x=0.67,y=0.7,Tt=default,To=default),
-  crdate(p=0,x=0.75,y=0.7,Tt=default,To=default),
-  crtime(p=0,x=0.85,y=0.7,Tt=default,To=default),
-  comment#1(p=0,x=0.1,y=0.72,Tt=default,To=default),
-  comment#2(p=0,x=0.1,y=0.73,Tt=default,To=default),
-  comment#3(p=0,x=0.1,y=0.74,Tt=default,To=default),
-  comment#4(p=1,x=0.1,y=0.75,Tt=default,To=default),
-  xname(p=0,x=0.5,y=0.21,Tt=default,To=defcenter),
-  yname(p=0,x=0.02,y=0.4,Tt=default,To=defcentup),
-  zname(p=1,x=0,y=0.82,Tt=default,To=default),
-  tname(p=1,x=0,y=0.82,Tt=default,To=default),
-  xunits(p=0,x=0.6,y=0.21,Tt=default,To=default),
-  yunits(p=0,x=0.02,y=0.5,Tt=default,To=defcentup),
-  zunits(p=1,x=0,y=0.82,Tt=default,To=default),
-  tunits(p=1,x=0,y=0.82,Tt=default,To=default),
-  xvalue(p=1,x=0.8,y=0.7,Th=default,Tt=default,To=default),
-  yvalue(p=0,x=0.8,y=0.69,Th=default,Tt=default,To=default),
-  zvalue(p=0,x=0.8,y=0.68,Th=default,Tt=default,To=default),
-  tvalue(p=1,x=0.8,y=0.67,Th=default,Tt=default,To=default),
-  mean(p=0,x=0.05,y=0.68,Th=default,Tt=default,To=default),
-  max(p=0,x=0.25,y=0.68,Th=default,Tt=default,To=default),
-  min(p=0,x=0.45,y=0.68,Th=default,Tt=default,To=default),
-  xtic#1(p=0,y1=0.2,y2=0.19,Tl=default),
-  xtic#2(p=0,y1=0.65,y2=0.66,Tl=default),
-  xmintic#a(p=0,y1=0.2,y2=0.195,Tl=default),
-  xmintic#b(p=0,y1=0.65,y2=0.655,Tl=default),
-  ytic#1(p=0,x1=0.05,x2=0.04,Tl=default),
-  ytic#2(p=0,x1=0.55,x2=0.56,Tl=default),
-  ymintic#a(p=0,x1=0.05,x2=0.045,Tl=default),
-  ymintic#b(p=0,x1=0.55,x2=0.555,Tl=default),
-  xlabel#1(p=0,y=0.18,Tt=default,To=defcenter),
-  xlabel#2(p=0,y=0,Tt=default,To=defcenter),
-  ylabel#1(p=0,x=0.035,Tt=default,To=defright),
-  ylabel#2(p=0,x=0,Tt=default,To=default),
-  box#1(p=0,x1=0.05,y1=0.2,x2=0.55,y2=0.65,Tl=default),
-  box#2(p=0,x1=0,y1=0.23,x2=0.92,y2=0.67,Tl=default),
-  box#3(p=0,x1=0,y1=0.24,x2=0.91,y2=0.66,Tl=default),
-  box#4(p=0,x1=0,y1=0,x2=0,y2=0,Tl=default),
-  line#1(p=0,x1=0.05,y1=0.425,x2=0.95,y2=0.425,Tl=default),
-  line#2(p=0,x1=0.5,y1=0.2,x2=0.5,y2=0.65,Tl=default),
-  line#3(p=0,x1=0,y1=0.4,x2=0.9,y2=0.4,Tl=default),
-  line#4(p=0,x1=0,y1=0.8,x2=0.9,y2=0.8,Tl=default),
-  legend(p=1,x1=0.863636,y1=0.483014,x2=0.909091,y2=0.483014,Tt=std,To=left,Tl=default),
-  data(p=1,x1=0.1071242,y1=0.0640523,x2=0.819543,y2=0.624837)  )
-Tt_std(1,1,1,0.2,241)
-To_left(0.01,0,r,l,h)
-P_ASD9(
-  File(p=0,x=0.05,y=0.0100003,Tt=default,To=default),
-  Function(p=0,x=0.05,y=0.0100003,Tt=default,To=default),
-  LogicalMask(p=1,x=0.05,y=0.02,Tt=default,To=default),
-  Transform(p=1,x=0.05,y=0.03,Tt=default,To=default),
-  source(p=0,x=0.05,y=0.712,Tt=default,To=default),
-  name(p=0,x=0.05,y=0.7,Tt=default,To=default),
-  title(p=0,x=0.15,y=0.7,Tt=default,To=default),
-  units(p=0,x=0.67,y=0.7,Tt=default,To=default),
-  crdate(p=0,x=0.75,y=0.7,Tt=default,To=default),
-  crtime(p=0,x=0.85,y=0.7,Tt=default,To=default),
-  comment#1(p=0,x=0.1,y=0.72,Tt=default,To=default),
-  comment#2(p=0,x=0.1,y=0.73,Tt=default,To=default),
-  comment#3(p=0,x=0.1,y=0.74,Tt=default,To=default),
-  comment#4(p=1,x=0.1,y=0.75,Tt=default,To=default),
-  xname(p=0,x=0.5,y=0.21,Tt=default,To=defcenter),
-  yname(p=0,x=0.02,y=0.4,Tt=default,To=defcentup),
-  zname(p=1,x=0,y=0.82,Tt=default,To=default),
-  tname(p=1,x=0,y=0.82,Tt=default,To=default),
-  xunits(p=0,x=0.6,y=0.21,Tt=default,To=default),
-  yunits(p=0,x=0.02,y=0.5,Tt=default,To=defcentup),
-  zunits(p=1,x=0,y=0.82,Tt=default,To=default),
-  tunits(p=1,x=0,y=0.82,Tt=default,To=default),
-  xvalue(p=1,x=0.8,y=0.7,Th=default,Tt=default,To=default),
-  yvalue(p=0,x=0.8,y=0.69,Th=default,Tt=default,To=default),
-  zvalue(p=0,x=0.8,y=0.68,Th=default,Tt=default,To=default),
-  tvalue(p=1,x=0.8,y=0.67,Th=default,Tt=default,To=default),
-  mean(p=0,x=0.05,y=0.68,Th=default,Tt=default,To=default),
-  max(p=0,x=0.25,y=0.68,Th=default,Tt=default,To=default),
-  min(p=0,x=0.45,y=0.68,Th=default,Tt=default,To=default),
-  xtic#1(p=0,y1=0.2,y2=0.19,Tl=default),
-  xtic#2(p=0,y1=0.65,y2=0.66,Tl=default),
-  xmintic#a(p=0,y1=0.2,y2=0.195,Tl=default),
-  xmintic#b(p=0,y1=0.65,y2=0.655,Tl=default),
-  ytic#1(p=0,x1=0.05,x2=0.04,Tl=default),
-  ytic#2(p=0,x1=0.55,x2=0.56,Tl=default),
-  ymintic#a(p=0,x1=0.05,x2=0.045,Tl=default),
-  ymintic#b(p=0,x1=0.55,x2=0.555,Tl=default),
-  xlabel#1(p=0,y=0.18,Tt=default,To=defcenter),
-  xlabel#2(p=0,y=0,Tt=default,To=defcenter),
-  ylabel#1(p=0,x=0.035,Tt=default,To=defright),
-  ylabel#2(p=0,x=0,Tt=default,To=default),
-  box#1(p=0,x1=0.05,y1=0.2,x2=0.55,y2=0.65,Tl=default),
-  box#2(p=0,x1=0,y1=0.23,x2=0.92,y2=0.67,Tl=default),
-  box#3(p=0,x1=0,y1=0.24,x2=0.91,y2=0.66,Tl=default),
-  box#4(p=0,x1=0,y1=0,x2=0,y2=0,Tl=default),
-  line#1(p=0,x1=0.05,y1=0.425,x2=0.95,y2=0.425,Tl=default),
-  line#2(p=0,x1=0.5,y1=0.2,x2=0.5,y2=0.65,Tl=default),
-  line#3(p=0,x1=0,y1=0.4,x2=0.9,y2=0.4,Tl=default),
-  line#4(p=0,x1=0,y1=0.8,x2=0.9,y2=0.8,Tl=default),
-  legend(p=1,x1=0.863636,y1=0.464437,x2=0.909091,y2=0.464437,Tt=std,To=left,Tl=default),
-  data(p=1,x1=0.1071242,y1=0.0640523,x2=0.819543,y2=0.624837)  )
-Tt_std(1,1,1,0.2,241)
-To_left(0.01,0,r,l,h)
-P_ASD10(
-  File(p=0,x=0.05,y=0.0100003,Tt=default,To=default),
-  Function(p=0,x=0.05,y=0.0100003,Tt=default,To=default),
-  LogicalMask(p=1,x=0.05,y=0.02,Tt=default,To=default),
-  Transform(p=1,x=0.05,y=0.03,Tt=default,To=default),
-  source(p=0,x=0.05,y=0.712,Tt=default,To=default),
-  name(p=0,x=0.05,y=0.7,Tt=default,To=default),
-  title(p=0,x=0.15,y=0.7,Tt=default,To=default),
-  units(p=0,x=0.67,y=0.7,Tt=default,To=default),
-  crdate(p=0,x=0.75,y=0.7,Tt=default,To=default),
-  crtime(p=0,x=0.85,y=0.7,Tt=default,To=default),
-  comment#1(p=0,x=0.1,y=0.72,Tt=default,To=default),
-  comment#2(p=0,x=0.1,y=0.73,Tt=default,To=default),
-  comment#3(p=0,x=0.1,y=0.74,Tt=default,To=default),
-  comment#4(p=1,x=0.1,y=0.75,Tt=default,To=default),
-  xname(p=0,x=0.5,y=0.21,Tt=default,To=defcenter),
-  yname(p=0,x=0.02,y=0.4,Tt=default,To=defcentup),
-  zname(p=1,x=0,y=0.82,Tt=default,To=default),
-  tname(p=1,x=0,y=0.82,Tt=default,To=default),
-  xunits(p=0,x=0.6,y=0.21,Tt=default,To=default),
-  yunits(p=0,x=0.02,y=0.5,Tt=default,To=defcentup),
-  zunits(p=1,x=0,y=0.82,Tt=default,To=default),
-  tunits(p=1,x=0,y=0.82,Tt=default,To=default),
-  xvalue(p=1,x=0.8,y=0.7,Th=default,Tt=default,To=default),
-  yvalue(p=0,x=0.8,y=0.69,Th=default,Tt=default,To=default),
-  zvalue(p=0,x=0.8,y=0.68,Th=default,Tt=default,To=default),
-  tvalue(p=1,x=0.8,y=0.67,Th=default,Tt=default,To=default),
-  mean(p=0,x=0.05,y=0.68,Th=default,Tt=default,To=default),
-  max(p=0,x=0.25,y=0.68,Th=default,Tt=default,To=default),
-  min(p=0,x=0.45,y=0.68,Th=default,Tt=default,To=default),
-  xtic#1(p=0,y1=0.2,y2=0.19,Tl=default),
-  xtic#2(p=0,y1=0.65,y2=0.66,Tl=default),
-  xmintic#a(p=0,y1=0.2,y2=0.195,Tl=default),
-  xmintic#b(p=0,y1=0.65,y2=0.655,Tl=default),
-  ytic#1(p=0,x1=0.05,x2=0.04,Tl=default),
-  ytic#2(p=0,x1=0.55,x2=0.56,Tl=default),
-  ymintic#a(p=0,x1=0.05,x2=0.045,Tl=default),
-  ymintic#b(p=0,x1=0.55,x2=0.555,Tl=default),
-  xlabel#1(p=0,y=0.18,Tt=default,To=defcenter),
-  xlabel#2(p=0,y=0,Tt=default,To=defcenter),
-  ylabel#1(p=0,x=0.035,Tt=default,To=defright),
-  ylabel#2(p=0,x=0,Tt=default,To=default),
-  box#1(p=0,x1=0.05,y1=0.2,x2=0.55,y2=0.65,Tl=default),
-  box#2(p=0,x1=0,y1=0.23,x2=0.92,y2=0.67,Tl=default),
-  box#3(p=0,x1=0,y1=0.24,x2=0.91,y2=0.66,Tl=default),
-  box#4(p=0,x1=0,y1=0,x2=0,y2=0,Tl=default),
-  line#1(p=0,x1=0.05,y1=0.425,x2=0.95,y2=0.425,Tl=default),
-  line#2(p=0,x1=0.5,y1=0.2,x2=0.5,y2=0.65,Tl=default),
-  line#3(p=0,x1=0,y1=0.4,x2=0.9,y2=0.4,Tl=default),
-  line#4(p=0,x1=0,y1=0.8,x2=0.9,y2=0.8,Tl=default),
-  legend(p=1,x1=0.863636,y1=0.445859,x2=0.909091,y2=0.445859,Tt=std,To=left,Tl=default),
-  data(p=1,x1=0.1071242,y1=0.0640523,x2=0.819543,y2=0.624837)  )
-Tt_std(1,1,1,0.2,241)
-To_left(0.01,0,r,l,h)
-P_ASD11(
-  File(p=0,x=0.05,y=0.0100003,Tt=default,To=default),
-  Function(p=0,x=0.402615,y=0.104575,Tt=default,To=default),
-  LogicalMask(p=1,x=0.05,y=0.02,Tt=default,To=default),
-  Transform(p=1,x=0.05,y=0.03,Tt=default,To=default),
-  source(p=0,x=0.05,y=0.712,Tt=default,To=default),
-  name(p=0,x=0.05,y=0.7,Tt=default,To=default),
-  title(p=0,x=0.15,y=0.7,Tt=default,To=default),
-  units(p=0,x=0.67,y=0.7,Tt=default,To=default),
-  crdate(p=0,x=0.75,y=0.7,Tt=default,To=default),
-  crtime(p=0,x=0.85,y=0.7,Tt=default,To=default),
-  comment#1(p=0,x=0.1,y=0.72,Tt=default,To=default),
-  comment#2(p=0,x=0.1,y=0.73,Tt=default,To=default),
-  comment#3(p=0,x=0.1,y=0.74,Tt=default,To=default),
-  comment#4(p=1,x=0.1,y=0.75,Tt=default,To=default),
-  xname(p=0,x=0.5,y=0.21,Tt=default,To=defcenter),
-  yname(p=0,x=0.02,y=0.4,Tt=default,To=defcentup),
-  zname(p=1,x=0,y=0.82,Tt=default,To=default),
-  tname(p=1,x=0,y=0.82,Tt=default,To=default),
-  xunits(p=0,x=0.6,y=0.21,Tt=default,To=default),
-  yunits(p=0,x=0.02,y=0.5,Tt=default,To=defcentup),
-  zunits(p=1,x=0,y=0.82,Tt=default,To=default),
-  tunits(p=1,x=0,y=0.82,Tt=default,To=default),
-  xvalue(p=1,x=0.8,y=0.7,Th=default,Tt=default,To=default),
-  yvalue(p=0,x=0.8,y=0.69,Th=default,Tt=default,To=default),
-  zvalue(p=0,x=0.8,y=0.68,Th=default,Tt=default,To=default),
-  tvalue(p=1,x=0.8,y=0.67,Th=default,Tt=default,To=default),
-  mean(p=0,x=0.05,y=0.68,Th=default,Tt=default,To=default),
-  max(p=0,x=0.25,y=0.68,Th=default,Tt=default,To=default),
-  min(p=0,x=0.45,y=0.68,Th=default,Tt=default,To=default),
-  xtic#1(p=0,y1=0.2,y2=0.19,Tl=default),
-  xtic#2(p=0,y1=0.65,y2=0.66,Tl=default),
-  xmintic#a(p=0,y1=0.2,y2=0.195,Tl=default),
-  xmintic#b(p=0,y1=0.65,y2=0.655,Tl=default),
-  ytic#1(p=0,x1=0.05,x2=0.04,Tl=default),
-  ytic#2(p=0,x1=0.55,x2=0.56,Tl=default),
-  ymintic#a(p=0,x1=0.05,x2=0.045,Tl=default),
-  ymintic#b(p=0,x1=0.55,x2=0.555,Tl=default),
-  xlabel#1(p=0,y=0.18,Tt=default,To=defcenter),
-  xlabel#2(p=0,y=0,Tt=default,To=defcenter),
-  ylabel#1(p=0,x=0.035,Tt=default,To=defright),
-  ylabel#2(p=0,x=0,Tt=default,To=default),
-  box#1(p=0,x1=0.05,y1=0.2,x2=0.55,y2=0.65,Tl=default),
-  box#2(p=0,x1=0,y1=0.23,x2=0.92,y2=0.67,Tl=default),
-  box#3(p=0,x1=0,y1=0.24,x2=0.91,y2=0.66,Tl=default),
-  box#4(p=0,x1=0,y1=0,x2=0,y2=0,Tl=default),
-  line#1(p=0,x1=0.05,y1=0.425,x2=0.95,y2=0.425,Tl=default),
-  line#2(p=0,x1=0.5,y1=0.2,x2=0.5,y2=0.65,Tl=default),
-  line#3(p=0,x1=0,y1=0.4,x2=0.9,y2=0.4,Tl=default),
-  line#4(p=0,x1=0,y1=0.8,x2=0.9,y2=0.8,Tl=default),
-  legend(p=1,x1=0.863636,y1=0.427282,x2=0.909091,y2=0.427282,Tt=std,To=left,Tl=default),
-  data(p=1,x1=0.1071242,y1=0.0640523,x2=0.819543,y2=0.624837)  )
-Tt_std(1,1,1,0.2,241)
-To_left(0.01,0,r,l,h)
-P_ASD12(
-  File(p=0,x=0.05,y=0.0100003,Tt=default,To=default),
-  Function(p=0,x=0.05,y=0.0100003,Tt=default,To=default),
-  LogicalMask(p=1,x=0.05,y=0.02,Tt=default,To=default),
-  Transform(p=1,x=0.05,y=0.03,Tt=default,To=default),
-  source(p=0,x=0.05,y=0.712,Tt=default,To=default),
-  name(p=0,x=0.05,y=0.7,Tt=default,To=default),
-  title(p=0,x=0.15,y=0.7,Tt=default,To=default),
-  units(p=0,x=0.67,y=0.7,Tt=default,To=default),
-  crdate(p=0,x=0.75,y=0.7,Tt=default,To=default),
-  crtime(p=0,x=0.85,y=0.7,Tt=default,To=default),
-  comment#1(p=0,x=0.1,y=0.72,Tt=default,To=default),
-  comment#2(p=0,x=0.1,y=0.73,Tt=default,To=default),
-  comment#3(p=0,x=0.1,y=0.74,Tt=default,To=default),
-  comment#4(p=1,x=0.1,y=0.75,Tt=default,To=default),
-  xname(p=0,x=0.5,y=0.21,Tt=default,To=defcenter),
-  yname(p=0,x=0.02,y=0.4,Tt=default,To=defcentup),
-  zname(p=1,x=0,y=0.82,Tt=default,To=default),
-  tname(p=1,x=0,y=0.82,Tt=default,To=default),
-  xunits(p=0,x=0.6,y=0.21,Tt=default,To=default),
-  yunits(p=0,x=0.02,y=0.5,Tt=default,To=defcentup),
-  zunits(p=1,x=0,y=0.82,Tt=default,To=default),
-  tunits(p=1,x=0,y=0.82,Tt=default,To=default),
-  xvalue(p=1,x=0.8,y=0.7,Th=default,Tt=default,To=default),
-  yvalue(p=0,x=0.8,y=0.69,Th=default,Tt=default,To=default),
-  zvalue(p=0,x=0.8,y=0.68,Th=default,Tt=default,To=default),
-  tvalue(p=1,x=0.8,y=0.67,Th=default,Tt=default,To=default),
-  mean(p=0,x=0.05,y=0.68,Th=default,Tt=default,To=default),
-  max(p=0,x=0.25,y=0.68,Th=default,Tt=default,To=default),
-  min(p=0,x=0.45,y=0.68,Th=default,Tt=default,To=default),
-  xtic#1(p=0,y1=0.2,y2=0.19,Tl=default),
-  xtic#2(p=0,y1=0.65,y2=0.66,Tl=default),
-  xmintic#a(p=0,y1=0.2,y2=0.195,Tl=default),
-  xmintic#b(p=0,y1=0.65,y2=0.655,Tl=default),
-  ytic#1(p=0,x1=0.05,x2=0.04,Tl=default),
-  ytic#2(p=0,x1=0.55,x2=0.56,Tl=default),
-  ymintic#a(p=0,x1=0.05,x2=0.045,Tl=default),
-  ymintic#b(p=0,x1=0.55,x2=0.555,Tl=default),
-  xlabel#1(p=0,y=0.18,Tt=default,To=defcenter),
-  xlabel#2(p=0,y=0,Tt=default,To=defcenter),
-  ylabel#1(p=0,x=0.035,Tt=default,To=defright),
-  ylabel#2(p=0,x=0,Tt=default,To=default),
-  box#1(p=0,x1=0.05,y1=0.2,x2=0.55,y2=0.65,Tl=default),
-  box#2(p=0,x1=0,y1=0.23,x2=0.92,y2=0.67,Tl=default),
-  box#3(p=0,x1=0,y1=0.24,x2=0.91,y2=0.66,Tl=default),
-  box#4(p=0,x1=0,y1=0,x2=0,y2=0,Tl=default),
-  line#1(p=0,x1=0.05,y1=0.425,x2=0.95,y2=0.425,Tl=default),
-  line#2(p=0,x1=0.5,y1=0.2,x2=0.5,y2=0.65,Tl=default),
-  line#3(p=0,x1=0,y1=0.4,x2=0.9,y2=0.4,Tl=default),
-  line#4(p=0,x1=0,y1=0.8,x2=0.9,y2=0.8,Tl=default),
-  legend(p=1,x1=0.863636,y1=0.408704,x2=0.909091,y2=0.408704,Tt=std,To=left,Tl=default),
-  data(p=1,x1=0.1071242,y1=0.0640523,x2=0.819543,y2=0.624837)  )
-Tt_std(1,1,1,0.2,241)
-To_left(0.01,0,r,l,h)
-P_ASD13(
-  File(p=0,x=0.05,y=0.0100003,Tt=default,To=default),
-  Function(p=0,x=0.05,y=0.0100003,Tt=default,To=default),
-  LogicalMask(p=1,x=0.05,y=0.02,Tt=default,To=default),
-  Transform(p=1,x=0.05,y=0.03,Tt=default,To=default),
-  source(p=0,x=0.05,y=0.712,Tt=default,To=default),
-  name(p=0,x=0.05,y=0.7,Tt=default,To=default),
-  title(p=0,x=0.15,y=0.7,Tt=default,To=default),
-  units(p=0,x=0.67,y=0.7,Tt=default,To=default),
-  crdate(p=0,x=0.75,y=0.7,Tt=default,To=default),
-  crtime(p=0,x=0.85,y=0.7,Tt=default,To=default),
-  comment#1(p=0,x=0.1,y=0.72,Tt=default,To=default),
-  comment#2(p=0,x=0.1,y=0.73,Tt=default,To=default),
-  comment#3(p=0,x=0.1,y=0.74,Tt=default,To=default),
-  comment#4(p=1,x=0.1,y=0.75,Tt=default,To=default),
-  xname(p=0,x=0.5,y=0.21,Tt=default,To=defcenter),
-  yname(p=0,x=0.02,y=0.4,Tt=default,To=defcentup),
-  zname(p=1,x=0,y=0.82,Tt=default,To=default),
-  tname(p=1,x=0,y=0.82,Tt=default,To=default),
-  xunits(p=0,x=0.6,y=0.21,Tt=default,To=default),
-  yunits(p=0,x=0.02,y=0.5,Tt=default,To=defcentup),
-  zunits(p=1,x=0,y=0.82,Tt=default,To=default),
-  tunits(p=1,x=0,y=0.82,Tt=default,To=default),
-  xvalue(p=1,x=0.8,y=0.7,Th=default,Tt=default,To=default),
-  yvalue(p=0,x=0.8,y=0.69,Th=default,Tt=default,To=default),
-  zvalue(p=0,x=0.8,y=0.68,Th=default,Tt=default,To=default),
-  tvalue(p=1,x=0.8,y=0.67,Th=default,Tt=default,To=default),
-  mean(p=0,x=0.05,y=0.68,Th=default,Tt=default,To=default),
-  max(p=0,x=0.25,y=0.68,Th=default,Tt=default,To=default),
-  min(p=0,x=0.45,y=0.68,Th=default,Tt=default,To=default),
-  xtic#1(p=0,y1=0.2,y2=0.19,Tl=default),
-  xtic#2(p=0,y1=0.65,y2=0.66,Tl=default),
-  xmintic#a(p=0,y1=0.2,y2=0.195,Tl=default),
-  xmintic#b(p=0,y1=0.65,y2=0.655,Tl=default),
-  ytic#1(p=0,x1=0.05,x2=0.04,Tl=default),
-  ytic#2(p=0,x1=0.55,x2=0.56,Tl=default),
-  ymintic#a(p=0,x1=0.05,x2=0.045,Tl=default),
-  ymintic#b(p=0,x1=0.55,x2=0.555,Tl=default),
-  xlabel#1(p=0,y=0.18,Tt=default,To=defcenter),
-  xlabel#2(p=0,y=0,Tt=default,To=defcenter),
-  ylabel#1(p=0,x=0.035,Tt=default,To=defright),
-  ylabel#2(p=0,x=0,Tt=default,To=default),
-  box#1(p=0,x1=0.05,y1=0.2,x2=0.55,y2=0.65,Tl=default),
-  box#2(p=0,x1=0,y1=0.23,x2=0.92,y2=0.67,Tl=default),
-  box#3(p=0,x1=0,y1=0.24,x2=0.91,y2=0.66,Tl=default),
-  box#4(p=0,x1=0,y1=0,x2=0,y2=0,Tl=default),
-  line#1(p=0,x1=0.05,y1=0.425,x2=0.95,y2=0.425,Tl=default),
-  line#2(p=0,x1=0.5,y1=0.2,x2=0.5,y2=0.65,Tl=default),
-  line#3(p=0,x1=0,y1=0.4,x2=0.9,y2=0.4,Tl=default),
-  line#4(p=0,x1=0,y1=0.8,x2=0.9,y2=0.8,Tl=default),
-  legend(p=1,x1=0.863636,y1=0.390127,x2=0.909091,y2=0.390127,Tt=std,To=left,Tl=default),
-  data(p=1,x1=0.1071242,y1=0.0640523,x2=0.819543,y2=0.624837)  )
-Tt_std(1,1,1,0.2,241)
-To_left(0.01,0,r,l,h)
-P_ASD14(
-  File(p=0,x=0.05,y=0.0100003,Tt=default,To=default),
-  Function(p=0,x=0.05,y=0.0100003,Tt=default,To=default),
-  LogicalMask(p=1,x=0.05,y=0.02,Tt=default,To=default),
-  Transform(p=1,x=0.05,y=0.03,Tt=default,To=default),
-  source(p=0,x=0.05,y=0.712,Tt=default,To=default),
-  name(p=0,x=0.05,y=0.7,Tt=default,To=default),
-  title(p=0,x=0.15,y=0.7,Tt=default,To=default),
-  units(p=0,x=0.67,y=0.7,Tt=default,To=default),
-  crdate(p=0,x=0.75,y=0.7,Tt=default,To=default),
-  crtime(p=0,x=0.85,y=0.7,Tt=default,To=default),
-  comment#1(p=0,x=0.1,y=0.72,Tt=default,To=default),
-  comment#2(p=0,x=0.1,y=0.73,Tt=default,To=default),
-  comment#3(p=1,x=0.1,y=0.74,Tt=default,To=default),
-  comment#4(p=1,x=0.1,y=0.75,Tt=default,To=default),
-  xname(p=0,x=0.5,y=0.21,Tt=default,To=defcenter),
-  yname(p=0,x=0.02,y=0.4,Tt=default,To=defcentup),
-  zname(p=1,x=0,y=0.82,Tt=default,To=default),
-  tname(p=1,x=0,y=0.82,Tt=default,To=default),
-  xunits(p=0,x=0.6,y=0.21,Tt=default,To=default),
-  yunits(p=0,x=0.02,y=0.5,Tt=default,To=defcentup),
-  zunits(p=1,x=0,y=0.82,Tt=default,To=default),
-  tunits(p=1,x=0,y=0.82,Tt=default,To=default),
-  xvalue(p=1,x=0.8,y=0.7,Th=default,Tt=default,To=default),
-  yvalue(p=0,x=0.8,y=0.69,Th=default,Tt=default,To=default),
-  zvalue(p=0,x=0.8,y=0.68,Th=default,Tt=default,To=default),
-  tvalue(p=1,x=0.8,y=0.67,Th=default,Tt=default,To=default),
-  mean(p=0,x=0.05,y=0.68,Th=default,Tt=default,To=default),
-  max(p=0,x=0.25,y=0.68,Th=default,Tt=default,To=default),
-  min(p=0,x=0.45,y=0.68,Th=default,Tt=default,To=default),
-  xtic#1(p=0,y1=0.2,y2=0.19,Tl=default),
-  xtic#2(p=0,y1=0.65,y2=0.66,Tl=default),
-  xmintic#a(p=0,y1=0.2,y2=0.195,Tl=default),
-  xmintic#b(p=0,y1=0.65,y2=0.655,Tl=default),
-  ytic#1(p=0,x1=0.05,x2=0.04,Tl=default),
-  ytic#2(p=0,x1=0.55,x2=0.56,Tl=default),
-  ymintic#a(p=0,x1=0.05,x2=0.045,Tl=default),
-  ymintic#b(p=0,x1=0.55,x2=0.555,Tl=default),
-  xlabel#1(p=0,y=0.18,Tt=default,To=defcenter),
-  xlabel#2(p=0,y=0,Tt=default,To=defcenter),
-  ylabel#1(p=0,x=0.035,Tt=default,To=defright),
-  ylabel#2(p=0,x=0,Tt=default,To=default),
-  box#1(p=0,x1=0.05,y1=0.2,x2=0.55,y2=0.65,Tl=default),
-  box#2(p=0,x1=0,y1=0.23,x2=0.92,y2=0.67,Tl=default),
-  box#3(p=0,x1=0,y1=0.24,x2=0.91,y2=0.66,Tl=default),
-  box#4(p=0,x1=0,y1=0,x2=0,y2=0,Tl=default),
-  line#1(p=0,x1=0.05,y1=0.425,x2=0.95,y2=0.425,Tl=default),
-  line#2(p=0,x1=0.5,y1=0.2,x2=0.5,y2=0.65,Tl=default),
-  line#3(p=0,x1=0,y1=0.4,x2=0.9,y2=0.4,Tl=default),
-  line#4(p=0,x1=0,y1=0.8,x2=0.9,y2=0.8,Tl=default),
-  legend(p=1,x1=0.863636,y1=0.371549,x2=0.909091,y2=0.371549,Tt=std,To=left,Tl=default),
-  data(p=1,x1=0.1071242,y1=0.0640523,x2=0.819543,y2=0.624837)  )
-Tt_std(1,1,1,0.2,241)
-To_left(0.01,0,r,l,h)
-P_ASD15(
-  File(p=0,x=0.05,y=0.0100003,Tt=default,To=default),
-  Function(p=0,x=0.05,y=0.0100003,Tt=default,To=default),
-  LogicalMask(p=1,x=0.05,y=0.02,Tt=default,To=default),
-  Transform(p=1,x=0.05,y=0.03,Tt=default,To=default),
-  source(p=0,x=0.05,y=0.712,Tt=default,To=default),
-  name(p=0,x=0.05,y=0.7,Tt=default,To=default),
-  title(p=0,x=0.15,y=0.7,Tt=default,To=default),
-  units(p=0,x=0.67,y=0.7,Tt=default,To=default),
-  crdate(p=0,x=0.75,y=0.7,Tt=default,To=default),
-  crtime(p=0,x=0.85,y=0.7,Tt=default,To=default),
-  comment#1(p=0,x=0.1,y=0.72,Tt=default,To=default),
-  comment#2(p=0,x=0.1,y=0.73,Tt=default,To=default),
-  comment#3(p=0,x=0.1,y=0.74,Tt=default,To=default),
-  comment#4(p=1,x=0.1,y=0.75,Tt=default,To=default),
-  xname(p=0,x=0.5,y=0.21,Tt=default,To=defcenter),
-  yname(p=0,x=0.02,y=0.4,Tt=default,To=defcentup),
-  zname(p=1,x=0,y=0.82,Tt=default,To=default),
-  tname(p=1,x=0,y=0.82,Tt=default,To=default),
-  xunits(p=0,x=0.6,y=0.21,Tt=default,To=default),
-  yunits(p=0,x=0.02,y=0.5,Tt=default,To=defcentup),
-  zunits(p=1,x=0,y=0.82,Tt=default,To=default),
-  tunits(p=1,x=0,y=0.82,Tt=default,To=default),
-  xvalue(p=1,x=0.8,y=0.7,Th=default,Tt=default,To=default),
-  yvalue(p=0,x=0.8,y=0.69,Th=default,Tt=default,To=default),
-  zvalue(p=0,x=0.8,y=0.68,Th=default,Tt=default,To=default),
-  tvalue(p=1,x=0.8,y=0.67,Th=default,Tt=default,To=default),
-  mean(p=0,x=0.05,y=0.68,Th=default,Tt=default,To=default),
-  max(p=0,x=0.25,y=0.68,Th=default,Tt=default,To=default),
-  min(p=0,x=0.45,y=0.68,Th=default,Tt=default,To=default),
-  xtic#1(p=0,y1=0.2,y2=0.19,Tl=default),
-  xtic#2(p=0,y1=0.65,y2=0.66,Tl=default),
-  xmintic#a(p=0,y1=0.2,y2=0.195,Tl=default),
-  xmintic#b(p=0,y1=0.65,y2=0.655,Tl=default),
-  ytic#1(p=0,x1=0.05,x2=0.04,Tl=default),
-  ytic#2(p=0,x1=0.55,x2=0.56,Tl=default),
-  ymintic#a(p=0,x1=0.05,x2=0.045,Tl=default),
-  ymintic#b(p=0,x1=0.55,x2=0.555,Tl=default),
-  xlabel#1(p=0,y=0.18,Tt=default,To=defcenter),
-  xlabel#2(p=0,y=0,Tt=default,To=defcenter),
-  ylabel#1(p=0,x=0.035,Tt=default,To=defright),
-  ylabel#2(p=0,x=0,Tt=default,To=default),
-  box#1(p=0,x1=0.05,y1=0.2,x2=0.55,y2=0.65,Tl=default),
-  box#2(p=0,x1=0,y1=0.23,x2=0.92,y2=0.67,Tl=default),
-  box#3(p=0,x1=0,y1=0.24,x2=0.91,y2=0.66,Tl=default),
-  box#4(p=0,x1=0,y1=0,x2=0,y2=0,Tl=default),
-  line#1(p=0,x1=0.05,y1=0.425,x2=0.95,y2=0.425,Tl=default),
-  line#2(p=0,x1=0.5,y1=0.2,x2=0.5,y2=0.65,Tl=default),
-  line#3(p=0,x1=0,y1=0.4,x2=0.9,y2=0.4,Tl=default),
-  line#4(p=0,x1=0,y1=0.8,x2=0.9,y2=0.8,Tl=default),
-  legend(p=1,x1=0.863636,y1=0.352972,x2=0.909091,y2=0.352972,Tt=std,To=left,Tl=default),
-  data(p=1,x1=0.1071242,y1=0.0640523,x2=0.819543,y2=0.624837)  )
-Tt_mwbotcenter(1,1,1,0.2,1)
-To_mwbotcenter(0.01,0,r,c,b)
-Tt_std(1,1,1,0.2,241)
-To_left(0.01,0,r,l,h)
-Gfb_ASD(
-   projection=linear,xticlabels#1=*,xticlabels#2=*,yticlabels#1=*,
-   yticlabels#2=*,
-   datawc(1e+20,1e+20,1e+20,1e+20),
-   xaxisconvert=linear,
-   yaxisconvert=linear,
-   level_1=1e+20,level_2=1e+20,color_1=16,color_2=239,legend_type=0,
-   legend=(),
-   ext_1=n,ext_2=n,missing=241)
-Gcon_ASD(
-   projection=linear,xticlabels#1=lon30,xticlabels#2=lon30,yticlabels#1=lat20,
-   yticlabels#2=lat20,
-   datawc(-180,-90,180,90),
-   Tl=ASDCont,
-   Type=1,
-  )
-Tl_ASDCont(1,2.8,241)
-Gfi_ASD(
-   projection=linear,xticlabels#1=*,xticlabels#2=*,xmtics#1=*,xmtics#2=*,
-   yticlabels#1=*,yticlabels#2=*,ymtics#1=*,ymtics#2=*,
-   datawc(1e+20,1e+20,1e+20,1e+20),
-   xaxisconvert=linear,
-   yaxisconvert=linear,
-   missing=1e+20,
-   range
-   (id=0,level1=1e+20,level2=1e+20,Tf=default)  )
-Gi_ASD(
-   projection=linear,xticlabels#1=*,xticlabels#2=*,yticlabels#1=*,
-   yticlabels#2=*,
-   datawc(1e+20,1e+20,1e+20,1e+20),
-   xaxisconvert=linear,
-   yaxisconvert=linear,
-   make_labels=n,
-   lines
-   (id=0,priority=1,level=0,increment=1e+20,hilite_ci=0,
-    label=*,Tl=default,Tt=default,To=default)
-  )
-Gfo_ASD(
-   projection=linear,xticlabels#1=*,yticlabels#1=*,
-   yticlabels#2=*,
-   datawc(1e+20,1e+20,1e+20,1e+20),
-   xaxisconvert=linear,
-   yaxisconvert=linear,
-   Tf=default,
-   outline(1, 2, 3, 4, 5, 6, 7))
-Go_ASD(
-   projection=linear,xticlabels#1=*,yticlabels#1=*,
-   yticlabels#2=*,
-   datawc(1e+20,1e+20,1e+20,1e+20),
-   xaxisconvert=linear,
-   yaxisconvert=linear,
-   Tl=default,
-   outline(1, 2, 3, 4, 5, 6, 7))
-GXy_ASD1(
-   projection=linear,xticlabels#1=*,xticlabels#2=*,yticlabels#1=*,
-   yticlabels#2=*,
-   datawc(1e+20,1e+20,1e+20,1e+20),
-   Tl=ASD1,Tm=None)
-Tl_ASD1(1,4.9,241)
-Tm_ASD1(1,4.9,241)
-GXy_ASD2(
-   projection=linear,xticlabels#1=*,xticlabels#2=*,yticlabels#1=*,
-   yticlabels#2=*,
-   datawc(1e+20,1e+20,1e+20,1e+20),
-   Tl=ASD2,Tm=None)
-Tl_ASD2(1,4.9,242)
-Tm_ASD2(2,4.9,242)
-GXy_ASD3(
-   projection=linear,xticlabels#1=*,xticlabels#2=*,yticlabels#1=*,
-   yticlabels#2=*,
-   datawc(1e+20,1e+20,1e+20,1e+20),
-   Tl=ASD3,Tm=None)
-Tl_ASD3(1,4.9,243)
-Tm_ASD3(3,4.9,243)
-GXy_ASD4(
-   projection=linear,xticlabels#1=*,xticlabels#2=*,yticlabels#1=*,
-   yticlabels#2=*,
-   datawc(1e+20,1e+20,1e+20,1e+20),
-   Tl=ASD4,Tm=None)
-Tl_ASD4(1,4.9,244)
-Tm_ASD4(4,4.9,244)
-GXy_ASD5(
-   projection=linear,xticlabels#1=*,xticlabels#2=*,yticlabels#1=*,
-   yticlabels#2=*,
-   datawc(1e+20,1e+20,1e+20,1e+20),
-   Tl=ASD5,Tm=None)
-Tl_ASD5(1,4.9,245)
-Tm_ASD5(5,4.9,245)
-GXy_ASD6(
-   projection=linear,xticlabels#1=*,xticlabels#2=*,yticlabels#1=*,
-   yticlabels#2=*,
-   datawc(1e+20,1e+20,1e+20,1e+20),
-   Tl=ASD6,Tm=None)
-Tl_ASD6(1,4.9,246)
-Tm_ASD6(6,4.9,246)
-GXy_ASD7(
-   projection=linear,xticlabels#1=*,xticlabels#2=*,yticlabels#1=*,
-   yticlabels#2=*,
-   datawc(1e+20,1e+20,1e+20,1e+20),
-   Tl=ASD7,Tm=None)
-Tl_ASD7(1,4.9,247)
-Tm_ASD7(7,4.9,247)
-GXy_ASD8(
-   projection=linear,xticlabels#1=*,xticlabels#2=*,yticlabels#1=*,
-   yticlabels#2=*,
-   datawc(1e+20,1e+20,1e+20,1e+20),
-   Tl=ASD8,Tm=None)
-Tl_ASD8(1,4.9,248)
-Tm_ASD8(8,4.9,248)
-GXy_ASD9(
-   projection=linear,xticlabels#1=*,xticlabels#2=*,yticlabels#1=*,
-   yticlabels#2=*,
-   datawc(1e+20,1e+20,1e+20,1e+20),
-   Tl=ASD9,Tm=None)
-Tl_ASD9(1,4.9,249)
-Tm_ASD9(9,4.9,249)
-GXy_ASD10(
-   projection=linear,xticlabels#1=*,xticlabels#2=*,yticlabels#1=*,
-   yticlabels#2=*,
-   datawc(1e+20,1e+20,1e+20,1e+20),
-   Tl=ASD10,Tm=None)
-Tl_ASD10(1,4.9,250)
-Tm_ASD10(10,4.9,250)
-GXy_ASD11(
-   projection=linear,xticlabels#1=*,xticlabels#2=*,yticlabels#1=*,
-   yticlabels#2=*,
-   datawc(1e+20,1e+20,1e+20,1e+20),
-   Tl=ASD11,Tm=None)
-Tl_ASD11(1,4.9,251)
-Tm_ASD11(11,4.9,251)
-GXy_ASD12(
-   projection=linear,xticlabels#1=*,xticlabels#2=*,yticlabels#1=*,
-   yticlabels#2=*,
-   datawc(1e+20,1e+20,1e+20,1e+20),
-   Tl=ASD12,Tm=None)
-Tl_ASD12(1,4.9,252)
-Tm_ASD12(12,4.9,252)
-GXy_ASD13(
-   projection=linear,xticlabels#1=*,xticlabels#2=*,yticlabels#1=*,
-   yticlabels#2=*,
-   datawc(1e+20,1e+20,1e+20,1e+20),
-   Tl=ASD13,Tm=None)
-Tl_ASD13(1,4.9,253)
-Tm_ASD13(13,4.9,253)
-GXy_ASD14(
-   projection=linear,xticlabels#1=*,xticlabels#2=*,yticlabels#1=*,
-   yticlabels#2=*,
-   datawc(1e+20,1e+20,1e+20,1e+20),
-   Tl=ASD14,Tm=None)
-Tl_ASD14(1,4.9,254)
-Tm_ASD14(14,4.9,254)
-GXy_ASD15(
-   projection=linear,xticlabels#1=*,xticlabels#2=*,yticlabels#1=*,
-   yticlabels#2=*,
-   datawc(1e+20,1e+20,1e+20,1e+20),
-   Tl=ASD15,Tm=None)
-Tl_ASD15(1,4.9,255)
-Tm_ASD15(15,4.9,255)
-GYx_ASD1(
-   projection=linear,xticlabels#1=*,xticlabels#2=*,yticlabels#1=*,
-   yticlabels#2=*,
-   datawc(1e+20,1e+20,1e+20,1e+20),
-   xaxisconvert=linear,
-   Tl=ASD1,Tm=none)
-GYx_ASD2(
-   projection=linear,xticlabels#1=*,xticlabels#2=*,yticlabels#1=*,
-   yticlabels#2=*,
-   datawc(1,40,120,100),
-   xaxisconvert=linear,
-   Tl=ASD2,Tm=none)
-GYx_ASD3(
-   projection=linear,xticlabels#1=*,xticlabels#2=*,yticlabels#1=*,
-   yticlabels#2=*,
-   datawc(1,40,120,100),
-   xaxisconvert=linear,
-   Tl=ASD3,Tm=none)
-GYx_ASD4(
-   projection=linear,xticlabels#1=*,xticlabels#2=*,yticlabels#1=*,
-   yticlabels#2=*,
-   datawc(1,1e+20,120,1e+20),
-   xaxisconvert=linear,
-   Tl=ASD4,Tm=none)
-GYx_ASD5(
-   projection=linear,xticlabels#1=*,xticlabels#2=*,yticlabels#1=*,
-   yticlabels#2=*,
-   datawc(1,1e+20,120,1e+20),
-   xaxisconvert=linear,
-   Tl=ASD5,Tm=none)
-GYx_ASD6(
-   projection=linear,xticlabels#1=*,xticlabels#2=*,yticlabels#1=*,
-   yticlabels#2=*,
-   datawc(1,1e+20,120,1e+20),
-   xaxisconvert=linear,
-   Tl=ASD6,Tm=none)
-GYx_ASD7(
-   projection=linear,xticlabels#1=*,xticlabels#2=*,yticlabels#1=*,
-   yticlabels#2=*,
-   datawc(1,1e+20,120,1e+20),
-   xaxisconvert=linear,
-   Tl=ASD7,Tm=none)
-GYx_ASD8(
-   projection=linear,xticlabels#1=*,xticlabels#2=*,yticlabels#1=*,
-   yticlabels#2=*,
-   datawc(1,1e+20,120,1e+20),
-   xaxisconvert=linear,
-   Tl=ASD8,Tm=none)
-GYx_ASD9(
-   projection=linear,xticlabels#1=*,xticlabels#2=*,yticlabels#1=*,
-   yticlabels#2=*,
-   datawc(1,1e+20,120,1e+20),
-   xaxisconvert=linear,
-   Tl=ASD9,Tm=none)
-GYx_ASD10(
-   projection=linear,xticlabels#1=*,xticlabels#2=*,yticlabels#1=*,
-   yticlabels#2=*,
-   datawc(1,1e+20,120,1e+20),
-   xaxisconvert=linear,
-   Tl=ASD10,Tm=none)
-GYx_ASD11(
-   projection=linear,xticlabels#1=*,xticlabels#2=*,yticlabels#1=*,
-   yticlabels#2=*,
-   datawc(1,1e+20,120,1e+20),
-   xaxisconvert=linear,
-   Tl=ASD11,Tm=none)
-GYx_ASD12(
-   projection=linear,xticlabels#1=*,xticlabels#2=*,yticlabels#1=*,
-   yticlabels#2=*,
-   datawc(1,1e+20,120,1e+20),
-   xaxisconvert=linear,
-   Tl=ASD12,Tm=none)
-GYx_ASD13(
-   projection=linear,xticlabels#1=*,xticlabels#2=*,yticlabels#1=*,
-   yticlabels#2=*,
-   datawc(1,1e+20,120,1e+20),
-   xaxisconvert=linear,
-   Tl=ASD13,Tm=none)
-GYx_ASD14(
-   projection=linear,xticlabels#1=*,xticlabels#2=*,yticlabels#1=*,
-   yticlabels#2=*,
-   datawc(1,1e+20,120,1e+20),
-   xaxisconvert=linear,
-   Tl=ASD14,Tm=none)
-GYx_ASD15(
-   projection=linear,xticlabels#1=*,xticlabels#2=*,yticlabels#1=*,
-   yticlabels#2=*,
-   datawc(1,1e+20,120,1e+20),
-   xaxisconvert=linear,
-   Tl=ASD15,Tm=none)
-GSp_ASD(
-   projection=linear,xticlabels#1=*,xticlabels#2=*,yticlabels#1=*,
-   yticlabels#2=*,
-   datawc(1e+20,1e+20,1e+20,1e+20),
-   xaxisconvert=linear,
-   yaxisconvert=linear,
-   Tm=default)
-Gv_ASD(
-   projection=linear,xticlabels#1=*,xticlabels#2=*,yticlabels#1=*,
-   yticlabels#2=*,
-   datawc(1e+20,1e+20,1e+20,1e+20),
-   xaxisconvert=linear,
-   yaxisconvert=linear,
-   Tl=default,vector_scale=1,vector_align=c,vector_type=2,ref_vector=1e+20)
-C_ASD(
-   100,100,100,   0,0,0,   84.7059,84.7059,84.7059,   31.7647,31.7647,31.7647,   100,100,100,   100,100,0,
-   0,2.7451,100,   0,5.4902,100,   0,9.01961,100,   0,11.3725,100,   0,14.902,100,   0,17.6471,100,
-   0,21.1765,100,   0,23.9216,100,   0,26.6667,100,   0,30.1961,100,   0,32.9412,100,   0,35.6863,100,
-   0,39.2157,100,   0,41.9608,100,   0,44.7059,100,   0,48.2353,100,   0,50.9804,100,   0,54.1176,100,
-   0,56.8627,100,   0,60.3922,100,   0,63.1373,100,   0,66.6667,100,   0,69.4118,100,   0,72.1569,100,
-   0,75.6863,100,   0,78.4314,100,   0,81.1765,100,   0,84.7059,100,   0,87.451,100,   0,90.1961,100,
-   0,93.7255,100,   0,96.4706,100,   0,100,100,   0,100,96.4706,   0,100,93.7255,   0,100,90.1961,
-   0,100,87.451,   0,100,84.7059,   0,100,81.1765,   0,100,78.4314,   0,100,75.6863,   0,100,72.1569,
-   0,100,69.4118,   0,100,66.6667,   0,100,63.1373,   0,100,60.3922,   0,100,56.8627,   0,100,54.1176,
-   0,100,50.9804,   0,100,48.2353,   0,100,44.7059,   0,100,41.9608,   0,100,39.2157,   0,100,35.6863,
-   0,100,32.9412,   0,100,30.1961,   0,100,26.6667,   0,100,23.9216,   0,100,21.1765,   0,100,17.6471,
-   0,100,14.902,   0,100,11.3725,   0,100,9.01961,   0,100,5.4902,   0,100,2.7451,   0,100,0,
-   2.7451,100,0,   5.4902,100,0,   9.01961,100,0,   11.3725,100,0,   14.902,100,0,   17.6471,100,0,
-   21.1765,100,0,   23.9216,100,0,   26.6667,100,0,   30.1961,100,0,   32.9412,100,0,   35.6863,100,0,
-   39.2157,100,0,   41.9608,100,0,   44.7059,100,0,   48.2353,100,0,   50.9804,100,0,   54.1176,100,0,
-   56.8627,100,0,   60.3922,100,0,   63.1373,100,0,   66.6667,100,0,   69.4118,100,0,   72.1569,100,0,
-   75.6863,100,0,   78.4314,100,0,   81.1765,100,0,   84.7059,100,0,   87.451,100,0,   90.1961,100,0,
-   93.7255,100,0,   96.4706,100,0,   100,100,0,   100,97.6471,0,   100,95.6863,0,   100,93.7255,0,
-   100,91.3726,0,   100,89.4118,0,   100,87.451,0,   100,85.4902,0,   100,83.1373,0,   100,81.1765,0,
-   100,79.2157,0,   100,77.6471,0,   100,75.6863,0,   100,73.7255,0,   100,71.3726,0,   100,69.4118,0,
-   100,67.451,0,   100,65.4902,0,   100,63.1373,0,   100,61.1765,0,   100,59.2157,0,   100,56.8627,0,
-   100,54.902,0,   100,52.9412,0,   100,50.9804,0,   100,49.4118,0,   100,47.451,0,   100,44.7059,0,
-   100,43.1373,0,   100,41.1765,0,   100,39.2157,0,   100,36.8627,0,   100,34.902,0,   100,32.9412,0,
-   100,32.1569,0,   100,30.9804,0,   100,30.1961,0,   100,28.6275,0,   100,28.2353,0,   100,26.6667,0,
-   100,25.8824,0,   100,24.7059,0,   100,23.9216,0,   100,23.1373,0,   100,21.9608,0,   100,21.1765,0,
-   100,20,0,   100,18.4314,0,   100,17.6471,0,   100,16.4706,0,   100,15.6863,0,   100,14.902,0,
-   100,13.7255,0,   100,12.9412,0,   100,11.3725,0,   100,10.9804,0,   100,9.41177,0,   100,9.01961,0,
-   100,7.84314,0,   100,6.66667,0,   100,5.4902,0,   100,4.31373,0,   100,3.92157,0,   100,2.7451,0,
-   100,1.56863,0,   100,0.784314,0,   100,0,0,   97.6471,0,0,   95.6863,0,0,   93.7255,0,0,
-   92.1569,0,0,   90.1961,0,0,   88.2353,0,0,   86.6667,0,0,   84.7059,0,0,   82.7451,0,0,
-   80.3922,0,0,   79.2157,0,0,   76.8627,0,0,   74.902,0,0,   72.9412,0,0,   71.3726,0,0,
-   69.4118,0,0,   67.451,0,0,   65.8824,0,0,   63.9216,0,0,   61.9608,0,0,   60,0,0,
-   58.4314,0,0,   56.4706,0,0,   54.1176,0,0,   52.1569,0,0,   50.1961,0,0,   48.6275,0,0,
-   46.6667,0,0,   44.7059,0,0,   43.1373,0,0,   41.1765,0,0,   39.2157,0,0,   37.6471,0,0,
-   38.4314,0,1.56863,   39.2157,0,3.92157,   40.3922,0,5.4902,   41.1765,0,7.84314,   41.9608,0,10.1961,   43.1373,0,12.1569,
-   43.9216,0,13.7255,   44.7059,0,15.6863,   45.8824,0,18.4314,   46.6667,0,20.3922,   48.2353,0,21.9608,   48.6275,0,23.9216,
-   50.1961,0,25.8824,   50.9804,0,28.6275,   52.1569,0,30.1961,   52.9412,0,32.1569,   53.7255,0,34.1176,   54.902,0,36.4706,
-   55.6863,0,38.4314,   56.4706,0,40.3922,   57.6471,0,42.7451,   58.4314,0,44.7059,   59.2157,0,46.6667,   60.3922,0,48.6275,
-   61.1765,0,50.9804,   62.7451,0,52.9412,   63.1373,0,54.902,   64.7059,0,56.8627,   65.4902,0,59.2157,   66.6667,0,61.1765,
-   67.451,0,63.1373,   68.2353,0,65.4902,   69.4118,0,67.451,   70.1961,0,69.4118,   71.3726,0,71.3726,   72.1569,0,73.7255)
-
-Gtd_ASD(
-detail = 50;
-max = None;
-quadrans = 1;
-skillValues = [0.10000000000000001, 0.20000000000000001, 0.29999999999999999, 0.40000000000000002, 0.5, 0.59999999999999998, 0.69999999999999996, 0.80000000000000004, 0.90000000000000002, 0.94999999999999996];
-referencevalue = 1.0;
-arrowlength = 0.05;
-arrowangle = 20.0;
-arrowbase = 0.75;
-Marker;
-    status = [];
-    line = [];
-    id = [];
-    id_size = [];
-    id_color = [];
-    id_font = [];
-    symbol = [];
-    color = [];
-    size = [];
-    xoffset = [];
-    yoffset = [];
-    line_color = [];
-    line_size = [];
-    line_type = [];
-)
-Gfm_ASD(
-   projection=linear,xticlabels#1=*,
-   xticlabels#2=*,
-   xmtics#1=*,
-   xmtics#2=*,
-   yticlabels#1=*,
-   yticlabels#2=*,
-   ymtics#1=*,
-   ymtics#2=*,
-   datawc(1e+20,1e+20,1e+20,1e+20),
-   xaxisconvert=linear,
-   yaxisconvert=linear,
-   missing=241,
-   mesh=0,
-   wrap
-   (0, 0),
-   range
-   (id=0,level1=1e+20,level2=1e+20,Tf=default)  )
-
diff --git a/images/HARD_COPY b/images/HARD_COPY
deleted file mode 100755
index 04830595744d7faae01f05966a2947a2d905f934..0000000000000000000000000000000000000000
--- a/images/HARD_COPY
+++ /dev/null
@@ -1,76 +0,0 @@
-#######################################################################
-########################## H A R D   C O P Y ##########################
-#######################################################################
-#                                                                     #
-# This file contains the user specified printer names located on      #
-# their network!  See the "/etc/printcap" file for a list of          #
-# active printers. It is important to read this entire file for       #
-# instructions!!!!                                                    #
-#                                                                     #
-# The '#' at the start of a line indicates a comment or statement by  #
-# the user.                                                           #
-#                                                                     #
-# I M P O R T A N T    N O T I C E ! ! ! !                            #
-# - - - - - - - - -    - - - - - - - - - -                            #
-# VCS has no way of knowing which print manager your system is using. #
-# That is, 'lpr' (the BSD print spooler) or 'lp'. If the set          #
-# environment variable 'PRINTER' is unset, then VCS will use 'lp'.    #
-# If the set environment variable 'PRINTER' is set to 'printer',      #
-# then VCS will use 'lpr'.                                            #
-#                                                                     #
-#                                                                     #
-# If sending a CGM file to the printer from VCS results in an error   #
-# message (e.g., 'Error - In sending CGM file to printer.'), then     #
-# set or unset the 'PRINTER' environment variable.                    #
-#                                                                     #
-#######################################################################
-#######################################################################
-#######################################################################
-
-#######################################################################
-#######################################################################
-#######################################################################
-#             I M P O R T A N T    N O T I C E ! ! ! !                #
-#             - - - - - - - - -    - - - - - - - - - -                #
-# The lines below are used for GPLOT.  GPLOT is a graphics utility    #
-# program designed for the processing of CGM metafiles.  We use       #
-# GPLOT to convert the cgm file(s) to postscript output and send it   #
-# directly to a postscript printer. The absolute gplot path must be   #
-# set properly (below). That is:                                      #
-# landscape = /absolute_path/gplot -dPSC -r90 ...                     #
-# portrait  = /absolute_path/gplot -dPSC -D ...                       #
-#                                                                     #
-#######################################################################
-#######################################################################
-#######################################################################
-
-############################################################################
-# PRINTER ORIENTATION: Landscape                                           #
-# OUTPUT TYPE: Postscript       COLOR: YES                                 #
-# NOTE: THIS IS FOR SENDING TO THE PRINTER                                 #
-#                                                                          #
-# .cshrc file:                                                             #
-# In your .cshrc file you can set up an alias for converting your          #
-# landscape .cgm files. That is,                                           #
-# alias landscape '/absolute_path/gplot -dPSC -r90 -x-1.75 -D -X12.5 -Y10' #
-#                                                                          #
-############################################################################
-#landscape = /usr/local/bin/gplot -dPSC -r90 -x-1.75 -D -X12.5 -Y10
-
-#######################################################################
-# PRINTER ORIENTATION: Portrait                                       #
-# OUTPUT TYPE: Postscript       COLOR: YES                            #
-# NOTE: THIS IS FOR SENDING TO THE PRINTER                            #
-#                                                                     #
-# .cshrc file:                                                        #
-# In your .cshrc file you can set up an alias for converting your     #
-# portrait .cgm files. That is,                                       #
-# alias portrait '/absolute_path/gplot -dPSC -D -X10 -Y12.5'          #
-#                                                                     #
-#######################################################################
-#portrait = /usr/local/bin/gplot -dPSC -D -X10 -Y12.5
-
-#######################################################################
-###################  P R I N T E R   N A M E S  #######################
-#######################################################################
-
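
The HARD_COPY comments above spell out two configurable behaviors: print-spooler selection driven by the PRINTER environment variable ('lp' when unset, 'lpr' when set), and gplot command lines for converting CGM output to PostScript. As an illustrative sketch only (this is not the VCS source, and send_to_printer is a hypothetical helper name), the spooler rule amounts to:

    import os
    import subprocess

    def send_to_printer(ps_file):
        # Rule documented in HARD_COPY: use 'lpr' (the BSD spooler) when
        # the PRINTER environment variable is set, otherwise fall back
        # to 'lp'.
        printer = os.environ.get("PRINTER")
        if printer:
            cmd = ["lpr", "-P", printer, ps_file]
        else:
            cmd = ["lp", ps_file]
        subprocess.run(cmd, check=True)

The landscape and portrait gplot invocations listed in the file would be dispatched the same way, with the absolute gplot path filled in as the comments instruct.
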
diff --git a/images/PCM_isofill.scr b/images/PCM_isofill.scr
deleted file mode 100644
index c42b94f2473aa23606641032e6126cf5e56930d9..0000000000000000000000000000000000000000
--- a/images/PCM_isofill.scr
+++ /dev/null
@@ -1,976 +0,0 @@
-L_PCM_p_levels(1000,"1000",900,"900",800,"800",700,"700",600,"600",
-   500,"500",400,"400",300,"300",200,"200",100,"100",50,"50",
-   10,"10")
-
-L_PCM_height(1000,"0",795,"2",616.6,"4",472.2,"6",356.5,"8",
-   265,"10",121.1,"15",55.3,"20",12,"30")
-
-Tf_PCM16(
-   1, fais(1),
-   1, fasi(1),
-   1, faci(16),
-   0,0,0.1,0.1,1,
-   vp(0,1,0,1),
-   wc(0,1,0,1)
-  )
-Tf_PCM17(
-   1, fais(1),
-   1, fasi(1),
-   1, faci(17),
-   0,0,0.1,0.1,1,
-   vp(0,1,0,1),
-   wc(0,1,0,1)
-  )
-Tf_PCM18(
-   1, fais(1),
-   1, fasi(1),
-   1, faci(18),
-   0,0,0.1,0.1,1,
-   vp(0,1,0,1),
-   wc(0,1,0,1)
-  )
-Tf_PCM19(
-   1, fais(1),
-   1, fasi(1),
-   1, faci(19),
-   0,0,0.1,0.1,1,
-   vp(0,1,0,1),
-   wc(0,1,0,1)
-  )
-Tf_PCM20(
-   1, fais(1),
-   1, fasi(1),
-   1, faci(20),
-   0,0,0.1,0.1,1,
-   vp(0,1,0,1),
-   wc(0,1,0,1)
-  )
-Tf_PCM21(
-   1, fais(1),
-   1, fasi(1),
-   1, faci(21),
-   0,0,0.1,0.1,1,
-   vp(0,1,0,1),
-   wc(0,1,0,1)
-  )
-Tf_PCM22(
-   1, fais(1),
-   1, fasi(1),
-   1, faci(22),
-   0,0,0.1,0.1,1,
-   vp(0,1,0,1),
-   wc(0,1,0,1)
-  )
-Tf_PCM23(
-   1, fais(1),
-   1, fasi(1),
-   1, faci(23),
-   0,0,0.1,0.1,1,
-   vp(0,1,0,1),
-   wc(0,1,0,1)
-  )
-Tf_PCM24(
-   1, fais(1),
-   1, fasi(1),
-   1, faci(24),
-   0,0,0.1,0.1,1,
-   vp(0,1,0,1),
-   wc(0,1,0,1)
-  )
-Tf_PCM25(
-   1, fais(1),
-   1, fasi(1),
-   1, faci(25),
-   0,0,0.1,0.1,1,
-   vp(0,1,0,1),
-   wc(0,1,0,1)
-  )
-Tf_PCM26(
-   1, fais(1),
-   1, fasi(1),
-   1, faci(26),
-   0,0,0.1,0.1,1,
-   vp(0,1,0,1),
-   wc(0,1,0,1)
-  )
-Tf_PCM27(
-   1, fais(1),
-   1, fasi(1),
-   1, faci(27),
-   0,0,0.1,0.1,1,
-   vp(0,1,0,1),
-   wc(0,1,0,1)
-  )
-Tf_PCM28(
-   1, fais(1),
-   1, fasi(1),
-   1, faci(28),
-   0,0,0.1,0.1,1,
-   vp(0,1,0,1),
-   wc(0,1,0,1)
-  )
-Tf_PCM29(
-   1, fais(1),
-   1, fasi(1),
-   1, faci(29),
-   0,0,0.1,0.1,1,
-   vp(0,1,0,1),
-   wc(0,1,0,1)
-  )
-Tf_PCM30(
-   1, fais(1),
-   1, fasi(1),
-   1, faci(30),
-   0,0,0.1,0.1,1,
-   vp(0,1,0,1),
-   wc(0,1,0,1)
-  )
-Tf_PCM31(
-   1, fais(1),
-   1, fasi(1),
-   1, faci(31),
-   0,0,0.1,0.1,1,
-   vp(0,1,0,1),
-   wc(0,1,0,1)
-  )
-Tf_PCM32(
-   1, fais(1),
-   1, fasi(1),
-   1, faci(32),
-   0,0,0.1,0.1,1,
-   vp(0,1,0,1),
-   wc(0,1,0,1)
-  )
-Tf_PCM33(
-   1, fais(1),
-   1, fasi(1),
-   1, faci(33),
-   0,0,0.1,0.1,1,
-   vp(0,1,0,1),
-   wc(0,1,0,1)
-  )
-Tf_PCM34(
-   1, fais(1),
-   1, fasi(1),
-   1, faci(34),
-   0,0,0.1,0.1,1,
-   vp(0,1,0,1),
-   wc(0,1,0,1)
-  )
-Tf_PCM35(
-   1, fais(1),
-   1, fasi(1),
-   1, faci(35),
-   0,0,0.1,0.1,1,
-   vp(0,1,0,1),
-   wc(0,1,0,1)
-  )
-Tf_PCM36(
-   1, fais(1),
-   1, fasi(1),
-   1, faci(36),
-   0,0,0.1,0.1,1,
-   vp(0,1,0,1),
-   wc(0,1,0,1)
-  )
-Tf_PCM241(
-   1, fais(1),
-   1, fasi(1),
-   1, faci(241),
-   0,0,0.1,0.1,1,
-   vp(0,1,0,1),
-   wc(0,1,0,1)
-  )
-
-
-Gfi_PCM_clt(
-projection=linear,xticlabels#1=lon30,xticlabels#2=lon30,xmtics#1=lon5,
-xmtics#2=lon5,yticlabels#1=lat20,yticlabels#2=lat20,ymtics#1=lat5,
-ymtics#2=lat5,
-datawc(-180,-90,180,90),
-missing=1e+20,
-range
-(id=1,level1=0.,level2=.10,Tf=PCM16)
-(id=2,level1=.10,level2=.20,Tf=PCM22)
-(id=3,level1=.20,level2=.30,Tf=PCM23)
-(id=4,level1=.30,level2=.40,Tf=PCM32)
-(id=5,level1=.40,level2=.50,Tf=PCM33)
-(id=6,level1=.50,level2=.60,Tf=PCM34)
-(id=7,level1=.60,level2=.70,Tf=PCM27)
-(id=8,level1=.70,level2=.80,Tf=PCM28)
-(id=9,level1=.80,level2=.90,Tf=PCM29)
-(id=10,level1=.90,level2=1.00,Tf=PCM30) )
-
-Gfi_PCM_hfls(
-projection=linear,xticlabels#1=lon30,xticlabels#2=lon30,xmtics#1=lon5,
-xmtics#2=lon5,yticlabels#1=lat20,yticlabels#2=lat20,ymtics#1=lat5,
-ymtics#2=lat5,
-datawc(-180,-90,180,90),
-missing=1e+20,
-range
-(id=1,level1=-1e+20,level2=0,Tf=PCM16)
-(id=2,level1=0,level2=25,Tf=PCM18)
-(id=3,level1=25,level2=50,Tf=PCM19)
-(id=4,level1=50,level2=75,Tf=PCM20)
-(id=5,level1=75,level2=100,Tf=PCM21)
-(id=6,level1=100,level2=125,Tf=PCM22)
-(id=7,level1=125,level2=150,Tf=PCM23)
-(id=8,level1=150,level2=175,Tf=PCM24)
-(id=9,level1=175,level2=200,Tf=PCM25)
-(id=10,level1=200,level2=225,Tf=PCM26)
-(id=11,level1=225,level2=250,Tf=PCM27)
-(id=12,level1=250,level2=275,Tf=PCM28)
-(id=13,level1=275,level2=300,Tf=PCM29)
-(id=14,level1=300,level2=1e+20,Tf=PCM30) )
-
-Gfi_PCM_hfss(
-projection=linear,xticlabels#1=lon30,xticlabels#2=lon30,xmtics#1=lon5,
-xmtics#2=lon5,yticlabels#1=lat20,yticlabels#2=lat20,ymtics#1=lat5,
-ymtics#2=lat5,
-datawc(-180,-90,180,90),
-missing=1e+20,
-range
-(id=1,level1=-1e+20,level2=-100,Tf=PCM16)
-(id=2,level1=-100,level2=-50,Tf=PCM18)
-(id=3,level1=-50,level2=-25,Tf=PCM19)
-(id=4,level1=-25,level2=-15,Tf=PCM20)
-(id=5,level1=-15,level2=-10,Tf=PCM21)
-(id=6,level1=-10,level2=-5,Tf=PCM22)
-(id=7,level1=-5,level2=0,Tf=PCM23)
-(id=8,level1=0,level2=5,Tf=PCM24)
-(id=9,level1=5,level2=10,Tf=PCM25)
-(id=10,level1=10,level2=15,Tf=PCM26)
-(id=11,level1=15,level2=25,Tf=PCM27)
-(id=12,level1=25,level2=50,Tf=PCM28)
-(id=13,level1=50,level2=100,Tf=PCM29)
-(id=14,level1=100,level2=1e+20,Tf=PCM30) )
-
-Gfi_PCM_hus(
-projection=linear,xticlabels#1=lat20,xticlabels#2=lat20,yticlabels#1=PCM_p_levels,
-yticlabels#2=PCM_height,
-datawc(90,1000,-90,10),
-missing=1e+20,
-range
-(id=1,level1=0,level2=0.0005,Tf=PCM20)
-(id=2,level1=0.0005,level2=0.001,Tf=PCM21)
-(id=3,level1=0.001,level2=0.002,Tf=PCM22)
-(id=4,level1=0.002,level2=0.004,Tf=PCM23)
-(id=5,level1=0.004,level2=0.006,Tf=PCM24)
-(id=6,level1=0.006,level2=0.008,Tf=PCM25)
-(id=7,level1=0.008,level2=0.01,Tf=PCM26)
-(id=8,level1=0.01,level2=0.012,Tf=PCM27)
-(id=9,level1=0.012,level2=0.014,Tf=PCM28)
-(id=10,level1=0.014,level2=0.016,Tf=PCM29)
-(id=11,level1=0.016,level2=0.018,Tf=PCM30)
-(id=12,level1=0.018,level2=0.02,Tf=PCM31) )
-
-
-Gfi_PCM_hur(
-projection=linear,xticlabels#1=lat20,xticlabels#2=lat20,yticlabels#1=PCM_p_levels,
-yticlabels#2=PCM_height,
-datawc(90,1000,-90,10),
-missing=1e+20,
-range
-(id=1,level1=0,level2=10,Tf=PCM20)
-(id=2,level1=10,level2=20,Tf=PCM21)
-(id=3,level1=20,level2=30,Tf=PCM22)
-(id=4,level1=30,level2=40,Tf=PCM23)
-(id=5,level1=40,level2=50,Tf=PCM24)
-(id=6,level1=50,level2=60,Tf=PCM25)
-(id=7,level1=60,level2=70,Tf=PCM26)
-(id=8,level1=70,level2=80,Tf=PCM27)
-(id=9,level1=80,level2=90,Tf=PCM28)
-(id=10,level1=90,level2=100,Tf=PCM29)
-(id=11,level1=1e+20,level2=1e+20,Tf=PCM241) )
-
-Gfi_PCM_pr(
-projection=linear,xticlabels#1=lon30,xticlabels#2=lon30,xmtics#1=lon5,
-xmtics#2=lon5,yticlabels#1=lat20,yticlabels#2=lat20,ymtics#1=lat5,
-ymtics#2=lat5,
-datawc(-180,-90,180,90),
-missing=1e+20,
-range
-(id=1,level1=0,level2=1e-09,Tf=PCM16)
-(id=2,level1=1e-09,level2=2e-09,Tf=PCM18)
-(id=3,level1=2e-09,level2=5e-09,Tf=PCM19)
-(id=4,level1=5e-09,level2=1e-08,Tf=PCM20)
-(id=5,level1=1e-08,level2=1.5e-08,Tf=PCM21)
-(id=6,level1=1.5e-08,level2=2e-08,Tf=PCM22)
-(id=7,level1=2e-08,level2=3e-08,Tf=PCM23)
-(id=8,level1=3e-08,level2=5e-08,Tf=PCM24)
-(id=9,level1=5e-08,level2=7.5e-08,Tf=PCM25)
-(id=10,level1=7.5e-08,level2=1e-07,Tf=PCM26)
-(id=11,level1=1e-07,level2=1.5e-07,Tf=PCM27)
-(id=12,level1=1.5e-07,level2=2e-07,Tf=PCM28)
-(id=13,level1=2e-07,level2=3e-07,Tf=PCM29)
-(id=14,level1=3e-07,level2=1e+20,Tf=PCM30)
-(id=15,level1=1e+20,level2=1e+20,Tf=PCM241) )
-
-
-Gfi_PCM_prc(
-projection=linear,xticlabels#1=lon30,xticlabels#2=lon30,xmtics#1=lon5,
-xmtics#2=lon5,yticlabels#1=lat20,yticlabels#2=lat20,ymtics#1=lat5,
-ymtics#2=lat5,
-datawc(-180,-90,180,90),
-missing=1e+20,
-range
-(id=1,level1=0,level2=1e-09,Tf=PCM16)
-(id=2,level1=1e-09,level2=2e-09,Tf=PCM18)
-(id=3,level1=2e-09,level2=5e-09,Tf=PCM19)
-(id=4,level1=5e-09,level2=1e-08,Tf=PCM20)
-(id=5,level1=1e-08,level2=1.5e-08,Tf=PCM21)
-(id=6,level1=1.5e-08,level2=2e-08,Tf=PCM22)
-(id=7,level1=2e-08,level2=3e-08,Tf=PCM23)
-(id=8,level1=3e-08,level2=5e-08,Tf=PCM24)
-(id=9,level1=5e-08,level2=7.5e-08,Tf=PCM25)
-(id=10,level1=7.5e-08,level2=1e-07,Tf=PCM26)
-(id=11,level1=1e-07,level2=1.5e-07,Tf=PCM27)
-(id=12,level1=1.5e-07,level2=2e-07,Tf=PCM28)
-(id=13,level1=2e-07,level2=3e-07,Tf=PCM29)
-(id=14,level1=3e-07,level2=1e+20,Tf=PCM30)
-(id=15,level1=1e+20,level2=1e+20,Tf=PCM241) )
-
-
-Gfi_PCM_prsnc(
-projection=linear,xticlabels#1=lon30,xticlabels#2=lon30,xmtics#1=lon5,
-xmtics#2=lon5,yticlabels#1=lat20,yticlabels#2=lat20,ymtics#1=lat5,
-ymtics#2=lat5,
-datawc(-180,-90,180,90),
-missing=1e+20,
-range
-(id=1,level1=0,level2=1e-09,Tf=PCM16)
-(id=2,level1=1e-09,level2=2e-09,Tf=PCM18)
-(id=3,level1=2e-09,level2=5e-09,Tf=PCM19)
-(id=4,level1=5e-09,level2=1e-08,Tf=PCM20)
-(id=5,level1=1e-08,level2=1.5e-08,Tf=PCM21)
-(id=6,level1=1.5e-08,level2=2e-08,Tf=PCM22)
-(id=7,level1=2e-08,level2=3e-08,Tf=PCM23)
-(id=8,level1=3e-08,level2=5e-08,Tf=PCM24)
-(id=9,level1=5e-08,level2=7.5e-08,Tf=PCM25)
-(id=10,level1=7.5e-08,level2=1e-07,Tf=PCM26)
-(id=11,level1=1e-07,level2=1.5e-07,Tf=PCM27)
-(id=12,level1=1.5e-07,level2=2e-07,Tf=PCM28)
-(id=13,level1=2e-07,level2=3e-07,Tf=PCM29)
-(id=14,level1=3e-07,level2=1e+20,Tf=PCM30)
-(id=15,level1=1e+20,level2=1e+20,Tf=PCM241) )
-
-
-Gfi_PCM_prsnl(
-projection=linear,xticlabels#1=lon30,xticlabels#2=lon30,xmtics#1=lon5,
-xmtics#2=lon5,yticlabels#1=lat20,yticlabels#2=lat20,ymtics#1=lat5,
-ymtics#2=lat5,
-datawc(-180,-90,180,90),
-missing=1e+20,
-range
-(id=1,level1=0,level2=1e-09,Tf=PCM16)
-(id=2,level1=1e-09,level2=2e-09,Tf=PCM18)
-(id=3,level1=2e-09,level2=5e-09,Tf=PCM19)
-(id=4,level1=5e-09,level2=1e-08,Tf=PCM20)
-(id=5,level1=1e-08,level2=1.5e-08,Tf=PCM21)
-(id=6,level1=1.5e-08,level2=2e-08,Tf=PCM22)
-(id=7,level1=2e-08,level2=3e-08,Tf=PCM23)
-(id=8,level1=3e-08,level2=5e-08,Tf=PCM24)
-(id=9,level1=5e-08,level2=7.5e-08,Tf=PCM25)
-(id=10,level1=7.5e-08,level2=1e-07,Tf=PCM26)
-(id=11,level1=1e-07,level2=1.5e-07,Tf=PCM27)
-(id=12,level1=1.5e-07,level2=2e-07,Tf=PCM28)
-(id=13,level1=2e-07,level2=3e-07,Tf=PCM29)
-(id=14,level1=3e-07,level2=1e+20,Tf=PCM30)
-(id=15,level1=1e+20,level2=1e+20,Tf=PCM241) )
-
-
-Gfi_PCM_ps(
-projection=linear,xticlabels#1=lon30,xticlabels#2=lon30,xmtics#1=lon5,
-xmtics#2=lon5,yticlabels#1=lat20,yticlabels#2=lat20,ymtics#1=lat5,
-ymtics#2=lat5,
-datawc(-180,-90,180,90),
-missing=1e+20,
-range
-(id=1,level1=-1e+20,level2=97000,Tf=PCM30)
-(id=2,level1=97000,level2=97500,Tf=PCM29)
-(id=3,level1=97500,level2=98000,Tf=PCM28)
-(id=4,level1=98000,level2=98500,Tf=PCM27)
-(id=5,level1=98500,level2=99000,Tf=PCM26)
-(id=6,level1=99000,level2=99500,Tf=PCM25)
-(id=7,level1=99500,level2=100000,Tf=PCM24)
-(id=8,level1=100000,level2=100500,Tf=PCM23)
-(id=9,level1=100500,level2=101000,Tf=PCM22)
-(id=10,level1=101000,level2=101500,Tf=PCM21)
-(id=11,level1=101500,level2=102000,Tf=PCM20)
-(id=12,level1=102000,level2=102500,Tf=PCM19)
-(id=13,level1=102500,level2=103000,Tf=PCM18)
-(id=14,level1=103000,level2=103500,Tf=PCM17)
-(id=15,level1=103500,level2=104000,Tf=PCM35)
-(id=16,level1=104000,level2=1e+20,Tf=PCM36) )
-
-
-Gfi_PCM_psl(
-projection=linear,xticlabels#1=lon30,xticlabels#2=lon30,xmtics#1=lon5,
-xmtics#2=lon5,yticlabels#1=lat20,yticlabels#2=lat20,ymtics#1=lat5,
-ymtics#2=lat5,
-datawc(-180,-90,180,90),
-missing=1e+20,
-range
-(id=1,level1=-1e+20,level2=97000,Tf=PCM30)
-(id=2,level1=97000,level2=97500,Tf=PCM29)
-(id=3,level1=97500,level2=98000,Tf=PCM28)
-(id=4,level1=98000,level2=98500,Tf=PCM27)
-(id=5,level1=98500,level2=99000,Tf=PCM26)
-(id=6,level1=99000,level2=99500,Tf=PCM25)
-(id=7,level1=99500,level2=100000,Tf=PCM24)
-(id=8,level1=100000,level2=100500,Tf=PCM23)
-(id=9,level1=100500,level2=101000,Tf=PCM22)
-(id=10,level1=101000,level2=101500,Tf=PCM21)
-(id=11,level1=101500,level2=102000,Tf=PCM20)
-(id=12,level1=102000,level2=102500,Tf=PCM19)
-(id=13,level1=102500,level2=103000,Tf=PCM18)
-(id=14,level1=103000,level2=103500,Tf=PCM17)
-(id=15,level1=103500,level2=104000,Tf=PCM35)
-(id=16,level1=104000,level2=1e+20,Tf=PCM36) )
-
-Gfi_PCM_rlut(
-projection=linear,xticlabels#1=lon30,xticlabels#2=lon30,xmtics#1=lon5,
-xmtics#2=lon5,yticlabels#1=lat20,yticlabels#2=lat20,ymtics#1=lat5,
-ymtics#2=lat5,
-datawc(-180,-90,180,90),
-missing=1e+20,
-range
-(id=1,level1=-1e+20,level2=100,Tf=PCM16)
-(id=2,level1=100,level2=120,Tf=PCM30)
-(id=3,level1=120,level2=140,Tf=PCM29)
-(id=4,level1=140,level2=160,Tf=PCM28)
-(id=5,level1=160,level2=180,Tf=PCM27)
-(id=6,level1=180,level2=200,Tf=PCM26)
-(id=7,level1=200,level2=220,Tf=PCM25)
-(id=8,level1=220,level2=240,Tf=PCM24)
-(id=9,level1=240,level2=260,Tf=PCM23)
-(id=10,level1=260,level2=280,Tf=PCM22)
-(id=11,level1=280,level2=300,Tf=PCM21)
-(id=12,level1=300,level2=320,Tf=PCM20)
-(id=13,level1=320,level2=340,Tf=PCM19)
-(id=14,level1=340,level2=1e+20,Tf=PCM18) )
-
-Gfi_PCM_rlutcs(
-projection=linear,xticlabels#1=lon30,xticlabels#2=lon30,xmtics#1=lon5,
-xmtics#2=lon5,yticlabels#1=lat20,yticlabels#2=lat20,ymtics#1=lat5,
-ymtics#2=lat5,
-datawc(-180,-90,180,90),
-missing=1e+20,
-range
-(id=1,level1=-1e+20,level2=100,Tf=PCM16)
-(id=2,level1=100,level2=120,Tf=PCM30)
-(id=3,level1=120,level2=140,Tf=PCM29)
-(id=4,level1=140,level2=160,Tf=PCM28)
-(id=5,level1=160,level2=180,Tf=PCM27)
-(id=6,level1=180,level2=200,Tf=PCM26)
-(id=7,level1=200,level2=220,Tf=PCM25)
-(id=8,level1=220,level2=240,Tf=PCM24)
-(id=9,level1=240,level2=260,Tf=PCM23)
-(id=10,level1=260,level2=280,Tf=PCM22)
-(id=11,level1=280,level2=300,Tf=PCM21)
-(id=12,level1=300,level2=320,Tf=PCM20)
-(id=13,level1=320,level2=340,Tf=PCM19)
-(id=14,level1=340,level2=1e+20,Tf=PCM18) )
-
-Gfi_PCM_rsds(
-projection=linear,xticlabels#1=lon30,xticlabels#2=lon30,xmtics#1=lon5,
-xmtics#2=lon5,yticlabels#1=lat20,yticlabels#2=lat20,ymtics#1=lat5,
-ymtics#2=lat5,
-datawc(-180,-90,180,90),
-missing=1e+20,
-range
-(id=1,level1=0,level2=25,Tf=PCM16)
-(id=2,level1=25,level2=50,Tf=PCM30)
-(id=3,level1=50,level2=75,Tf=PCM29)
-(id=4,level1=75,level2=100,Tf=PCM28)
-(id=5,level1=100,level2=125,Tf=PCM27)
-(id=6,level1=125,level2=150,Tf=PCM34)
-(id=7,level1=150,level2=175,Tf=PCM33)
-(id=8,level1=175,level2=200,Tf=PCM32)
-(id=9,level1=200,level2=225,Tf=PCM23)
-(id=10,level1=225,level2=250,Tf=PCM22)
-(id=11,level1=250,level2=275,Tf=PCM21)
-(id=12,level1=275,level2=300,Tf=PCM20)
-(id=13,level1=300,level2=325,Tf=PCM19)
-(id=14,level1=325,level2=350,Tf=PCM18)
-(id=15,level1=350,level2=1e+20,Tf=PCM17) )
-
-Gfi_PCM_rsdscs(
-projection=linear,xticlabels#1=lon30,xticlabels#2=lon30,xmtics#1=lon5,
-xmtics#2=lon5,yticlabels#1=lat20,yticlabels#2=lat20,ymtics#1=lat5,
-ymtics#2=lat5,
-datawc(-180,-90,180,90),
-missing=1e+20,
-range
-(id=1,level1=0,level2=25,Tf=PCM16)
-(id=2,level1=25,level2=50,Tf=PCM30)
-(id=3,level1=50,level2=75,Tf=PCM29)
-(id=4,level1=75,level2=100,Tf=PCM28)
-(id=5,level1=100,level2=125,Tf=PCM27)
-(id=6,level1=125,level2=150,Tf=PCM34)
-(id=7,level1=150,level2=175,Tf=PCM33)
-(id=8,level1=175,level2=200,Tf=PCM32)
-(id=9,level1=200,level2=225,Tf=PCM23)
-(id=10,level1=225,level2=250,Tf=PCM22)
-(id=11,level1=250,level2=275,Tf=PCM21)
-(id=12,level1=275,level2=300,Tf=PCM20)
-(id=13,level1=300,level2=325,Tf=PCM19)
-(id=14,level1=325,level2=350,Tf=PCM18)
-(id=15,level1=350,level2=1e+20,Tf=PCM17) )
-
-Gfi_PCM_rsus(
-projection=linear,xticlabels#1=lon30,xticlabels#2=lon30,xmtics#1=lon5,
-xmtics#2=lon5,yticlabels#1=lat20,yticlabels#2=lat20,ymtics#1=lat5,
-ymtics#2=lat5,
-datawc(-180,-90,180,90),
-missing=1e+20,
-range
-(id=1,level1=0,level2=-25,Tf=PCM16)
-(id=2,level1=-25,level2=-50,Tf=PCM30)
-(id=3,level1=-50,level2=-75,Tf=PCM29)
-(id=4,level1=-75,level2=-100,Tf=PCM28)
-(id=5,level1=-100,level2=-125,Tf=PCM27)
-(id=6,level1=-125,level2=-150,Tf=PCM34)
-(id=7,level1=-150,level2=-175,Tf=PCM33)
-(id=8,level1=-175,level2=-200,Tf=PCM32)
-(id=9,level1=-200,level2=-225,Tf=PCM23)
-(id=10,level1=-225,level2=-250,Tf=PCM22)
-(id=11,level1=-250,level2=-275,Tf=PCM21)
-(id=12,level1=-275,level2=-300,Tf=PCM20)
-(id=13,level1=-300,level2=-325,Tf=PCM19)
-(id=14,level1=-325,level2=-350,Tf=PCM18)
-(id=15,level1=-350,level2=-400,Tf=PCM17)
-(id=16,level1=-400,level2=-1e+20,Tf=PCM35) )
-
-Gfi_PCM_rsut(
-projection=linear,xticlabels#1=lon30,xticlabels#2=lon30,xmtics#1=lon5,
-xmtics#2=lon5,yticlabels#1=lat20,yticlabels#2=lat20,ymtics#1=lat5,
-ymtics#2=lat5,
-datawc(-180,-90,180,90),
-missing=1e+20,
-range
-(id=1,level1=0,level2=-25,Tf=PCM16)
-(id=2,level1=-25,level2=-50,Tf=PCM30)
-(id=3,level1=-50,level2=-75,Tf=PCM29)
-(id=4,level1=-75,level2=-100,Tf=PCM28)
-(id=5,level1=-100,level2=-125,Tf=PCM27)
-(id=6,level1=-125,level2=-150,Tf=PCM34)
-(id=7,level1=-150,level2=-175,Tf=PCM33)
-(id=8,level1=-175,level2=-200,Tf=PCM32)
-(id=9,level1=-200,level2=-225,Tf=PCM23)
-(id=10,level1=-225,level2=-250,Tf=PCM22)
-(id=11,level1=-250,level2=-275,Tf=PCM21)
-(id=12,level1=-275,level2=-300,Tf=PCM20)
-(id=13,level1=-300,level2=-325,Tf=PCM19)
-(id=14,level1=-325,level2=-350,Tf=PCM18)
-(id=15,level1=-350,level2=-400,Tf=PCM17)
-(id=16,level1=-400,level2=-1e+20,Tf=PCM35) )
-
-
-Gfi_PCM_rsutcs(
-projection=linear,xticlabels#1=lon30,xticlabels#2=lon30,xmtics#1=lon5,
-xmtics#2=lon5,yticlabels#1=lat20,yticlabels#2=lat20,ymtics#1=lat5,
-ymtics#2=lat5,
-datawc(-180,-90,180,90),
-missing=1e+20,
-range
-(id=1,level1=0,level2=-25,Tf=PCM16)
-(id=2,level1=-25,level2=-50,Tf=PCM30)
-(id=3,level1=-50,level2=-75,Tf=PCM29)
-(id=4,level1=-75,level2=-100,Tf=PCM28)
-(id=5,level1=-100,level2=-125,Tf=PCM27)
-(id=6,level1=-125,level2=-150,Tf=PCM34)
-(id=7,level1=-150,level2=-175,Tf=PCM33)
-(id=8,level1=-175,level2=-200,Tf=PCM32)
-(id=9,level1=-200,level2=-225,Tf=PCM23)
-(id=10,level1=-225,level2=-250,Tf=PCM22)
-(id=11,level1=-250,level2=-275,Tf=PCM21)
-(id=12,level1=-275,level2=-300,Tf=PCM20)
-(id=13,level1=-300,level2=-325,Tf=PCM19)
-(id=14,level1=-325,level2=-350,Tf=PCM18)
-(id=15,level1=-350,level2=-400,Tf=PCM17)
-(id=16,level1=-400,level2=-1e+20,Tf=PCM35) )
-
-
-Gfi_PCM_ta(
-projection=linear,xticlabels#1=lat20,xticlabels#2=lat20,yticlabels#1=PCM_p_levels,
-yticlabels#2=PCM_height,
-datawc(90,1000,-90,1),
-yaxisconvert=linear,
-missing=1e+20,
-range(id=1,level1=-1e+20,level2=203,Tf=PCM16)
-(id=2,level1=203,level2=213,Tf=PCM30)
-(id=3,level1=213,level2=223,Tf=PCM29)
-(id=4,level1=223,level2=233,Tf=PCM28)
-(id=5,level1=233,level2=243,Tf=PCM27)
-(id=6,level1=243,level2=253,Tf=PCM34)
-(id=7,level1=253,level2=263,Tf=PCM33)
-(id=8,level1=263,level2=273,Tf=PCM32)
-(id=9,level1=273,level2=278,Tf=PCM23)
-(id=10,level1=278,level2=283,Tf=PCM22)
-(id=11,level1=283,level2=288,Tf=PCM21)
-(id=12,level1=288,level2=293,Tf=PCM20)
-(id=13,level1=293,level2=298,Tf=PCM19)
-(id=14,level1=298,level2=303,Tf=PCM18)
-(id=15,level1=303,level2=308,Tf=PCM17)
-(id=16,level1=308,level2=1e+20,Tf=PCM35) )
-
-Gfi_PCM_tas(
-projection=linear,xticlabels#1=lon30,xticlabels#2=lon30,xmtics#1=lon5,
-xmtics#2=lon5,yticlabels#1=lat20,yticlabels#2=lat20,ymtics#1=lat5,
-ymtics#2=lat5,
-datawc(-180,-90,180,90),
-missing=1e+20,
-range
-(id=1,level1=-1e+20,level2=238,Tf=PCM16)
-(id=2,level1=238,level2=243,Tf=PCM30)
-(id=3,level1=243,level2=248,Tf=PCM29)
-(id=4,level1=248,level2=253,Tf=PCM28)
-(id=5,level1=253,level2=258,Tf=PCM27)
-(id=6,level1=258,level2=263,Tf=PCM34)
-(id=7,level1=263,level2=268,Tf=PCM33)
-(id=8,level1=268,level2=273,Tf=PCM32)
-(id=9,level1=273,level2=278,Tf=PCM23)
-(id=10,level1=278,level2=283,Tf=PCM22)
-(id=11,level1=283,level2=288,Tf=PCM21)
-(id=12,level1=288,level2=293,Tf=PCM20)
-(id=13,level1=293,level2=298,Tf=PCM19)
-(id=14,level1=298,level2=303,Tf=PCM18)
-(id=15,level1=303,level2=308,Tf=PCM17)
-(id=16,level1=308,level2=1e+20,Tf=PCM35) )
-
-Gfi_PCM_tasmax(
-projection=linear,xticlabels#1=lon30,xticlabels#2=lon30,xmtics#1=lon5,
-xmtics#2=lon5,yticlabels#1=lat20,yticlabels#2=lat20,ymtics#1=lat5,
-ymtics#2=lat5,
-datawc(-180,-90,180,90),
-missing=1e+20,
-range
-(id=1,level1=-1e+20,level2=238,Tf=PCM16)
-(id=2,level1=238,level2=243,Tf=PCM30)
-(id=3,level1=243,level2=248,Tf=PCM29)
-(id=4,level1=248,level2=253,Tf=PCM28)
-(id=5,level1=253,level2=258,Tf=PCM27)
-(id=6,level1=258,level2=263,Tf=PCM34)
-(id=7,level1=263,level2=268,Tf=PCM33)
-(id=8,level1=268,level2=273,Tf=PCM32)
-(id=9,level1=273,level2=278,Tf=PCM23)
-(id=10,level1=278,level2=283,Tf=PCM22)
-(id=11,level1=283,level2=288,Tf=PCM21)
-(id=12,level1=288,level2=293,Tf=PCM20)
-(id=13,level1=293,level2=298,Tf=PCM19)
-(id=14,level1=298,level2=303,Tf=PCM18)
-(id=15,level1=303,level2=308,Tf=PCM17)
-(id=16,level1=308,level2=1e+20,Tf=PCM35) )
-
-Gfi_PCM_tasmin(
-projection=linear,xticlabels#1=lon30,xticlabels#2=lon30,xmtics#1=lon5,
-xmtics#2=lon5,yticlabels#1=lat20,yticlabels#2=lat20,ymtics#1=lat5,
-ymtics#2=lat5,
-datawc(-180,-90,180,90),
-missing=1e+20,
-range
-(id=1,level1=-1e+20,level2=238,Tf=PCM16)
-(id=2,level1=238,level2=243,Tf=PCM30)
-(id=3,level1=243,level2=248,Tf=PCM29)
-(id=4,level1=248,level2=253,Tf=PCM28)
-(id=5,level1=253,level2=258,Tf=PCM27)
-(id=6,level1=258,level2=263,Tf=PCM34)
-(id=7,level1=263,level2=268,Tf=PCM33)
-(id=8,level1=268,level2=273,Tf=PCM32)
-(id=9,level1=273,level2=278,Tf=PCM23)
-(id=10,level1=278,level2=283,Tf=PCM22)
-(id=11,level1=283,level2=288,Tf=PCM21)
-(id=12,level1=288,level2=293,Tf=PCM20)
-(id=13,level1=293,level2=298,Tf=PCM19)
-(id=14,level1=298,level2=303,Tf=PCM18)
-(id=15,level1=303,level2=308,Tf=PCM17)
-(id=16,level1=308,level2=1e+20,Tf=PCM35) )
-
-Gfi_PCM_ts(
-projection=linear,xticlabels#1=lon30,xticlabels#2=lon30,xmtics#1=lon5,
-xmtics#2=lon5,yticlabels#1=lat20,yticlabels#2=lat20,ymtics#1=lat5,
-ymtics#2=lat5,
-datawc(-180,-90,180,90),
-missing=1e+20,
-range
-(id=1,level1=-1e+20,level2=238,Tf=PCM16)
-(id=2,level1=238,level2=243,Tf=PCM30)
-(id=3,level1=243,level2=248,Tf=PCM29)
-(id=4,level1=248,level2=253,Tf=PCM28)
-(id=5,level1=253,level2=258,Tf=PCM27)
-(id=6,level1=258,level2=263,Tf=PCM34)
-(id=7,level1=263,level2=268,Tf=PCM33)
-(id=8,level1=268,level2=273,Tf=PCM32)
-(id=9,level1=273,level2=278,Tf=PCM23)
-(id=10,level1=278,level2=283,Tf=PCM22)
-(id=11,level1=283,level2=288,Tf=PCM21)
-(id=12,level1=288,level2=293,Tf=PCM20)
-(id=13,level1=293,level2=298,Tf=PCM19)
-(id=14,level1=298,level2=303,Tf=PCM18)
-(id=15,level1=303,level2=308,Tf=PCM17)
-(id=16,level1=308,level2=1e+20,Tf=PCM35) )
-
-Gfi_PCM_tauu(
-projection=linear,xticlabels#1=lon30,xticlabels#2=lon30,xmtics#1=lon5,
-xmtics#2=lon5,yticlabels#1=lat20,yticlabels#2=lat20,ymtics#1=lat5,
-ymtics#2=lat5,
-datawc(-180,-90,180,90),
-missing=1e+20,
-range
-(id=1,level1=-1e+20,level2=-1,Tf=PCM29)
-(id=2,level1=-1,level2=-0.5,Tf=PCM28)
-(id=3,level1=-0.5,level2=-0.1,Tf=PCM27)
-(id=4,level1=-0.1,level2=-0.05,Tf=PCM26)
-(id=5,level1=-0.05,level2=-0.01,Tf=PCM25)
-(id=6,level1=-0.01,level2=0,Tf=PCM24)
-(id=7,level1=0,level2=0.01,Tf=PCM23)
-(id=8,level1=0.01,level2=0.05,Tf=PCM22)
-(id=9,level1=0.05,level2=0.1,Tf=PCM21)
-(id=10,level1=0.1,level2=0.5,Tf=PCM20)
-(id=11,level1=0.5,level2=1,Tf=PCM19)
-(id=12,level1=1,level2=1e+20,Tf=PCM18) )
-
-Gfi_PCM_tauugwd(
-projection=linear,xticlabels#1=lon30,xticlabels#2=lon30,xmtics#1=lon5,
-xmtics#2=lon5,yticlabels#1=lat20,yticlabels#2=lat20,ymtics#1=lat5,
-ymtics#2=lat5,
-datawc(-180,-90,180,90),
-missing=1e+20,
-range
-(id=1,level1=-1e+20,level2=-1,Tf=PCM29)
-(id=2,level1=-1,level2=-0.5,Tf=PCM28)
-(id=3,level1=-0.5,level2=-0.1,Tf=PCM27)
-(id=4,level1=-0.1,level2=-0.05,Tf=PCM26)
-(id=5,level1=-0.05,level2=-0.01,Tf=PCM25)
-(id=6,level1=-0.01,level2=0,Tf=PCM24)
-(id=7,level1=0,level2=0.01,Tf=PCM23)
-(id=8,level1=0.01,level2=0.05,Tf=PCM22)
-(id=9,level1=0.05,level2=0.1,Tf=PCM21)
-(id=10,level1=0.1,level2=0.5,Tf=PCM20)
-(id=11,level1=0.5,level2=1,Tf=PCM19)
-(id=12,level1=1,level2=1e+20,Tf=PCM18) )
-
-Gfi_PCM_tauv(
-projection=linear,xticlabels#1=lon30,xticlabels#2=lon30,xmtics#1=lon5,
-xmtics#2=lon5,yticlabels#1=lat20,yticlabels#2=lat20,ymtics#1=lat5,
-ymtics#2=lat5,
-datawc(-180,-90,180,90),
-missing=1e+20,
-range
-(id=1,level1=-1e+20,level2=-1,Tf=PCM29)
-(id=2,level1=-1,level2=-0.5,Tf=PCM28)
-(id=3,level1=-0.5,level2=-0.1,Tf=PCM27)
-(id=4,level1=-0.1,level2=-0.05,Tf=PCM26)
-(id=5,level1=-0.05,level2=-0.01,Tf=PCM25)
-(id=6,level1=-0.01,level2=0,Tf=PCM24)
-(id=7,level1=0,level2=0.01,Tf=PCM23)
-(id=8,level1=0.01,level2=0.05,Tf=PCM22)
-(id=9,level1=0.05,level2=0.1,Tf=PCM21)
-(id=10,level1=0.1,level2=0.5,Tf=PCM20)
-(id=11,level1=0.5,level2=1,Tf=PCM19)
-(id=12,level1=1,level2=1e+20,Tf=PCM18) )
-
-Gfi_PCM_tauvgwd(
-projection=linear,xticlabels#1=lon30,xticlabels#2=lon30,xmtics#1=lon5,
-xmtics#2=lon5,yticlabels#1=lat20,yticlabels#2=lat20,ymtics#1=lat5,
-ymtics#2=lat5,
-datawc(-180,-90,180,90),
-missing=1e+20,
-range
-(id=1,level1=-1e+20,level2=-1,Tf=PCM29)
-(id=2,level1=-1,level2=-0.5,Tf=PCM28)
-(id=3,level1=-0.5,level2=-0.1,Tf=PCM27)
-(id=4,level1=-0.1,level2=-0.05,Tf=PCM26)
-(id=5,level1=-0.05,level2=-0.01,Tf=PCM25)
-(id=6,level1=-0.01,level2=0,Tf=PCM24)
-(id=7,level1=0,level2=0.01,Tf=PCM23)
-(id=8,level1=0.01,level2=0.05,Tf=PCM22)
-(id=9,level1=0.05,level2=0.1,Tf=PCM21)
-(id=10,level1=0.1,level2=0.5,Tf=PCM20)
-(id=11,level1=0.5,level2=1,Tf=PCM19)
-(id=12,level1=1,level2=1e+20,Tf=PCM18) )
-
-Gfi_PCM_ua(
-projection=linear,xticlabels#1=lat20,xticlabels#2=lat20,yticlabels#1=PCM_p_levels,
-yticlabels#2=PCM_height,
-datawc(90,1000,-90,1),
-missing=1e+20,
-range
-(id=1,level1=-1e+20,level2=-25,Tf=PCM29)
-(id=2,level1=-25,level2=-20,Tf=PCM28)
-(id=3,level1=-20,level2=-15,Tf=PCM27)
-(id=4,level1=-15,level2=-10,Tf=PCM26)
-(id=5,level1=-10,level2=-5,Tf=PCM25)
-(id=6,level1=-5,level2=0,Tf=PCM24)
-(id=7,level1=0,level2=5,Tf=PCM23)
-(id=8,level1=5,level2=10,Tf=PCM22)
-(id=9,level1=10,level2=15,Tf=PCM21)
-(id=10,level1=15,level2=20,Tf=PCM20)
-(id=11,level1=20,level2=25,Tf=PCM19)
-(id=12,level1=25,level2=30,Tf=PCM17)
-(id=13,level1=30,level2=35,Tf=PCM35)
-(id=14,level1=35,level2=1e+20,Tf=PCM36) )
-
-
-Gfi_PCM_uas(
-projection=linear,xticlabels#1=lon30,xticlabels#2=lon30,xmtics#1=lon5,
-xmtics#2=lon5,yticlabels#1=lat20,yticlabels#2=lat20,ymtics#1=lat5,
-ymtics#2=lat5,
-datawc(-180,-90,180,90),
-missing=1e+20,
-range
-(id=1,level1=-1e+20,level2=-21,Tf=PCM16)
-(id=2,level1=-21,level2=-18,Tf=PCM30)
-(id=3,level1=-18,level2=-15,Tf=PCM29)
-(id=4,level1=-15,level2=-12,Tf=PCM28)
-(id=5,level1=-12,level2=-9,Tf=PCM27)
-(id=6,level1=-9,level2=-6,Tf=PCM34)
-(id=7,level1=-6,level2=-3,Tf=PCM33)
-(id=8,level1=-3,level2=0,Tf=PCM32)
-(id=9,level1=0,level2=3,Tf=PCM23)
-(id=10,level1=3,level2=6,Tf=PCM22)
-(id=11,level1=6,level2=9,Tf=PCM21)
-(id=12,level1=9,level2=12,Tf=PCM20)
-(id=13,level1=12,level2=15,Tf=PCM19)
-(id=14,level1=15,level2=18,Tf=PCM18)
-(id=15,level1=18,level2=21,Tf=PCM17)
-(id=16,level1=21,level2=1e+20,Tf=PCM35)
-(id=17,level1=1e+20,level2=1e+20,Tf=PCM241) )
-
-Gfi_PCM_vas(
-projection=linear,xticlabels#1=lon30,xticlabels#2=lon30,xmtics#1=lon5,
-xmtics#2=lon5,yticlabels#1=lat20,yticlabels#2=lat20,ymtics#1=lat5,
-ymtics#2=lat5,
-datawc(-180,-90,180,90),
-missing=1e+20,
-range
-(id=1,level1=-1e+20,level2=-10,Tf=PCM29)
-(id=2,level1=-10,level2=-8,Tf=PCM28)
-(id=3,level1=-8,level2=-6,Tf=PCM27)
-(id=4,level1=-6,level2=-4,Tf=PCM26)
-(id=5,level1=-4,level2=-2,Tf=PCM25)
-(id=6,level1=-2,level2=0,Tf=PCM24)
-(id=7,level1=0,level2=2,Tf=PCM23)
-(id=8,level1=2,level2=4,Tf=PCM22)
-(id=9,level1=4,level2=6,Tf=PCM21)
-(id=10,level1=6,level2=8,Tf=PCM20)
-(id=11,level1=8,level2=10,Tf=PCM19)
-(id=12,level1=10,level2=12,Tf=PCM18)
-(id=13,level1=12,level2=14,Tf=PCM17)
-(id=14,level1=14,level2=1e+20,Tf=PCM35)
-(id=17,level1=1e+20,level2=1e+20,Tf=PCM241) )
-
-Gfi_PCM_va(
-projection=linear,xticlabels#1=lat20,xticlabels#2=lat20,yticlabels#1=PCM_p_levels,
-yticlabels#2=PCM_height,
-datawc(90,1000,-90,1),
-xaxisconvert=linear,
-yaxisconvert=linear,
-missing=1e+20,
-range
-(id=1,level1=-1e+20,level2=-3,Tf=PCM29)
-(id=2,level1=-3,level2=-2.5,Tf=PCM28)
-(id=3,level1=-2.5,level2=-2,Tf=PCM27)
-(id=4,level1=-2,level2=-1.5,Tf=PCM26)
-(id=5,level1=-1.5,level2=-1,Tf=PCM25)
-(id=6,level1=-1,level2=-0.5,Tf=PCM24)
-(id=7,level1=-0.5,level2=0,Tf=PCM23)
-(id=8,level1=0,level2=0.5,Tf=PCM22)
-(id=9,level1=0.5,level2=1,Tf=PCM21)
-(id=10,level1=1,level2=1.5,Tf=PCM20)
-(id=11,level1=1.5,level2=2,Tf=PCM19)
-(id=12,level1=2,level2=2.5,Tf=PCM17)
-(id=13,level1=2.5,level2=3,Tf=PCM35)
-(id=14,level1=3,level2=1e+20,Tf=PCM36) )
-
-Gfi_PCM_wap(
-projection=linear,xticlabels#1=lat20,xticlabels#2=lat20,yticlabels#1=PCM_p_levels,
-yticlabels#2=PCM_height,
-datawc(90,1000,-90,1),
-xaxisconvert=linear,
-yaxisconvert=linear,
-missing=1e+20,
-range
-(id=1,level1=-1e+20,level2=-0.03,Tf=PCM29)
-(id=2,level1=-0.03,level2=-0.025,Tf=PCM28)
-(id=3,level1=-0.025,level2=-0.02,Tf=PCM27)
-(id=4,level1=-0.02,level2=-0.015,Tf=PCM26)
-(id=5,level1=-0.015,level2=-0.01,Tf=PCM25)
-(id=6,level1=-0.01,level2=-0.005,Tf=PCM24)
-(id=7,level1=-0.005,level2=0,Tf=PCM23)
-(id=8,level1=0,level2=0.005,Tf=PCM22)
-(id=9,level1=0.005,level2=0.01,Tf=PCM21)
-(id=10,level1=0.01,level2=0.015,Tf=PCM20)
-(id=11,level1=0.015,level2=0.02,Tf=PCM19)
-(id=12,level1=0.02,level2=0.025,Tf=PCM17)
-(id=13,level1=0.025,level2=0.03,Tf=PCM35)
-(id=14,level1=0.03,level2=1e+20,Tf=PCM36) )
-
-
-
-Gfi_PCM_zg(
-projection=linear,xticlabels#1=lat20,xticlabels#2=lat20,yticlabels#1=PCM_p_levels,
-yticlabels#2=PCM_height,
-datawc(90,1000,-90,1),
-xaxisconvert=linear,
-yaxisconvert=linear,
-missing=1e+20,
-range
-(id=0,level1=-1e+20,level2=0,Tf=PCM241)
-(id=1,level1=0,level2=3000,Tf=PCM29)
-(id=2,level1=3000,level2=6000,Tf=PCM28)
-(id=3,level1=6000,level2=9000,Tf=PCM27)
-(id=4,level1=9000,level2=12000,Tf=PCM26)
-(id=5,level1=12000,level2=15000,Tf=PCM25)
-(id=6,level1=15000,level2=18000,Tf=PCM24)
-(id=7,level1=18000,level2=21000,Tf=PCM23)
-(id=8,level1=21000,level2=24000,Tf=PCM22)
-(id=9,level1=24000,level2=27000,Tf=PCM21)
-(id=10,level1=27000,level2=30000,Tf=PCM20)
-(id=11,level1=30000,level2=33000,Tf=PCM19)
-(id=12,level1=33000,level2=36000,Tf=PCM17)
-(id=13,level1=36000,level2=39000,Tf=PCM35)
-(id=14,level1=39000,level2=1e+20,Tf=PCM36) )
-
-C_PCM(
-   100,100,100,   0,0,0,   44.7,62.4,100,   29.8,44.3,62,   76.9,84.3,100,   100,100,0,
-   100,55.6863,16.4706,   0,0,0,   100,100,100,   0,0,0,   100,0,0,   0,100,0,
-   0,0,100,   100,100,0,   0,100,100,   100,0,100,   98.4314,98.4314,100,   78.4314,12.549,3.1373,
-   88.6274,20,5.4902,   94.5098,33.3333,12.549,   100,55.6863,16.4706,   99.6078,80,24.7059,   97.6471,87.8431,24.7059,   95.6863,100,24.3137,
-   79.2157,100,83.5294,   52.549,100,94.5098,   36.4706,100,94.5098,   7.0588,78.4314,100,   23.5294,52.9412,100,   57.6471,20.7843,99.6078,
-   84.7059,6.6667,99.6078,   100,0,100,   80.7843,100,15.6863,   44.3137,100,14.1176,   23.1373,85.098,56.8627,   65.8824,0,0,
-   47.451,8.2353,11.3725,   0,50,100,   0,40,100,   0,30,100,   0,20,100,   0,10,100,
-   0,0,100,   10,0,100,   20,0,100,   30,0,100,   40,0,100,   50,0,100,
-   60,0,100,   70,0,100,   80,0,100,   90,0,100,   100,0,100,   100,0,90,
-   100,0,80,   100,0,70,   100,0,60,   100,0,50,   100,0,40,   100,0,30,
-   100,0,20,   100,0,10,   100,0,0,   95,10,10,   90,20,20,   85,30,30,
-   80,40,40,   75,50,50,   70,60,60,   65,70,70,   60,80,80,   55,90,90,
-   50,100,100,   45,100,90,   40,100,80,   35,100,70,   30,100,60,   25,100,50,
-   20,100,40,   15,100,30,   10,100,20,   5,100,10,   0,100,0,   10,95,10,
-   20,90,20,   30,85,30,   40,80,40,   50,75,50,   60,70,60,   70,65,70,
-   80,60,80,   90,55,90,   100,50,100,   90,45,100,   80,40,100,   70,35,100,
-   60,30,100,   50,25,100,   40,20,100,   30,15,100,   20,10,100,   10,5,100,
-   0,0,100,   10,10,95,   20,20,90,   30,30,85,   40,40,80,   50,50,75,
-   60,60,70,   70,70,65,   80,80,60,   90,90,55,   100,100,50,   100,90,45,
-   100,80,40,   100,70,35,   100,60,30,   100,50,25,   100,40,20,   100,30,15,
-   100,20,10,   100,10,5,   100,0,0,   95,0,0,   90,0,0,   85,0,0,
-   80,0,0,   75,0,0,   70,0,0,   65,0,0,   60,0,0,   55,0,0,
-   50,0,0,   45,0,0,   40,0,0,   35,0,0,   30,0,0,   25,0,0,
-   20,0,0,   15,0,0,   10,0,0,   5,0,0,   0,0,0,   0,5,0,
-   0,10,0,   0,15,0,   0,20,0,   0,25,0,   0,30,0,   0,35,0,
-   0,40,0,   0,45,0,   0,50,0,   0,55,0,   0,60,0,   0,65,0,
-   0,70,0,   0,75,0,   0,80,0,   0,85,0,   0,90,0,   0,95,0,
-   0,100,0,   0,95,5,   0,90,10,   0,85,15,   0,80,20,   0,75,25,
-   0,70,30,   0,65,35,   0,60,40,   0,55,45,   0,50,50,   0,45,55,
-   0,40,60,   0,35,65,   0,30,70,   0,25,75,   0,20,80,   0,15,85,
-   0,10,90,   0,5,95,   0,0,100,   0,0,95,   0,0,90,   0,0,85,
-   0,0,80,   0,0,75,   0,0,70,   0,0,65,   0,0,60,   0,0,55,
-   0,0,50,   0,0,45,   0,0,40,   0,0,35,   0,0,30,   0,0,25,
-   0,0,20,   0,0,15,   0,0,10,   0,0,5,   0,0,0,   5,5,5,
-   10,10,10,   15,15,15,   20,20,20,   25,25,25,   30,30,30,   35,35,35,
-   40,40,40,   45,45,45,   50,50,50,   55,55,55,   60,60,60,   65,65,65,
-   70,70,70,   75,75,75,   80,80,80,   85,85,85,   90,90,90,   95,95,95,
-   100,100,100,   100,95,95,   100,90,90,   100,85,85,   100,80,80,   100,75,75,
-   100,70,70,   100,65,65,   100,60,60,   100,55,55,   100,50,50,   100,45,45,
-   100,40,40,   100,35,35,   100,30,30,   100,25,25,   100,20,20,   100,15,15)
diff --git a/images/UV-CDAT_logo.png b/images/UV-CDAT_logo.png
deleted file mode 100644
index 17f40d09e09f3347e2a80a16800fb7a6621ac509..0000000000000000000000000000000000000000
Binary files a/images/UV-CDAT_logo.png and /dev/null differ
diff --git a/images/UV-CDAT_logo_sites.png b/images/UV-CDAT_logo_sites.png
deleted file mode 100644
index 6d568b8256795e70a9f578a17d96bc36d256f09f..0000000000000000000000000000000000000000
Binary files a/images/UV-CDAT_logo_sites.png and /dev/null differ
diff --git a/images/add.gif b/images/add.gif
deleted file mode 100644
index 3f40d591d10f17eab54fefed078985c9d339dff3..0000000000000000000000000000000000000000
Binary files a/images/add.gif and /dev/null differ
diff --git a/images/animate_load.gif b/images/animate_load.gif
deleted file mode 100644
index a6563b031e0578d30a723f0a4aca512aecb3292e..0000000000000000000000000000000000000000
Binary files a/images/animate_load.gif and /dev/null differ
diff --git a/images/animate_save.gif b/images/animate_save.gif
deleted file mode 100644
index 8b1081c5433266032db537c41285962a9226ebce..0000000000000000000000000000000000000000
Binary files a/images/animate_save.gif and /dev/null differ
diff --git a/images/base10.gif b/images/base10.gif
deleted file mode 100644
index d3069446b6d24a350879a3821746fc0d388af4f1..0000000000000000000000000000000000000000
Binary files a/images/base10.gif and /dev/null differ
diff --git a/images/bookmark_folder.gif b/images/bookmark_folder.gif
deleted file mode 100644
index 28ffc21d1d71469dde82dd93f5af2bc5fd074f5b..0000000000000000000000000000000000000000
Binary files a/images/bookmark_folder.gif and /dev/null differ
diff --git a/images/cdatdemo.gif b/images/cdatdemo.gif
deleted file mode 100644
index f8ca3e6bda59a9fdc35d1a5557c8787dc9a0cce6..0000000000000000000000000000000000000000
Binary files a/images/cdatdemo.gif and /dev/null differ
diff --git a/images/cdatnews b/images/cdatnews
deleted file mode 100755
index 277b2d9aed4a438b3cba40a4326fb4b7f1def08c..0000000000000000000000000000000000000000
--- a/images/cdatnews
+++ /dev/null
@@ -1,2 +0,0 @@
-#!/bin/sh
-/usr/bin/env P4PORT=stargate.llnl.gov:1666 P4USER=p4review p4 changes -l | more
diff --git a/images/cos.gif b/images/cos.gif
deleted file mode 100644
index 32fde08a86176a89114e17bb003fe09c5e3dd7db..0000000000000000000000000000000000000000
Binary files a/images/cos.gif and /dev/null differ
diff --git a/images/cosh.gif b/images/cosh.gif
deleted file mode 100644
index 977887a1174029e8e7dbaef1c09c2fb0929fca2e..0000000000000000000000000000000000000000
Binary files a/images/cosh.gif and /dev/null differ
diff --git a/images/cycle.gif b/images/cycle.gif
deleted file mode 100644
index d472dcd90c27f838885b355f0dbcb80b5f5cf5b1..0000000000000000000000000000000000000000
Binary files a/images/cycle.gif and /dev/null differ
diff --git a/images/devel_20.gif b/images/devel_20.gif
deleted file mode 100644
index 2e1e1aa9b92fe0b6d21197ba35df8072300caf75..0000000000000000000000000000000000000000
Binary files a/images/devel_20.gif and /dev/null differ
diff --git a/images/devel_menu.gif b/images/devel_menu.gif
deleted file mode 100644
index c6448aa2569cee7d65e5dd16646eca64f6948cc6..0000000000000000000000000000000000000000
Binary files a/images/devel_menu.gif and /dev/null differ
diff --git a/images/divide.gif b/images/divide.gif
deleted file mode 100644
index 8e540dc04c054fe9128d8950dbe316b3d58a364a..0000000000000000000000000000000000000000
Binary files a/images/divide.gif and /dev/null differ
diff --git a/images/edit.gif b/images/edit.gif
deleted file mode 100644
index 1e6858c6610207f5a52f97ea5d09d394f6de3d26..0000000000000000000000000000000000000000
Binary files a/images/edit.gif and /dev/null differ
diff --git a/images/edit_20.gif b/images/edit_20.gif
deleted file mode 100644
index f853cc7a04bde3cebb64ab02224f8688d7447c88..0000000000000000000000000000000000000000
Binary files a/images/edit_20.gif and /dev/null differ
diff --git a/images/edit_menu.gif b/images/edit_menu.gif
deleted file mode 100644
index 913c12821be01a5cfea7f314f577bda7fd3165aa..0000000000000000000000000000000000000000
Binary files a/images/edit_menu.gif and /dev/null differ
diff --git a/images/editdelete.gif b/images/editdelete.gif
deleted file mode 100644
index f0a0da26f4b6ba50f29f1b628e2eab8c135296c0..0000000000000000000000000000000000000000
Binary files a/images/editdelete.gif and /dev/null differ
diff --git a/images/editdelete_20.gif b/images/editdelete_20.gif
deleted file mode 100644
index 8bd6e28d0bd14b070e93e57f9b19ae4f74a01daf..0000000000000000000000000000000000000000
Binary files a/images/editdelete_20.gif and /dev/null differ
diff --git a/images/editdelete_menu.gif b/images/editdelete_menu.gif
deleted file mode 100644
index e6b4f3cbe5ed2fda84455f5f2853ba0868256a85..0000000000000000000000000000000000000000
Binary files a/images/editdelete_menu.gif and /dev/null differ
diff --git a/images/equal.gif b/images/equal.gif
deleted file mode 100644
index 3aa6e64035a58aa6243a57c8735f9926bb30980e..0000000000000000000000000000000000000000
Binary files a/images/equal.gif and /dev/null differ
diff --git a/images/exp.gif b/images/exp.gif
deleted file mode 100644
index af87ff8a84c1aa33e8a7897bf9de93eca2080da2..0000000000000000000000000000000000000000
Binary files a/images/exp.gif and /dev/null differ
diff --git a/images/fabs.gif b/images/fabs.gif
deleted file mode 100644
index c39cf67954189fb90472d22c77f148b05a7de505..0000000000000000000000000000000000000000
Binary files a/images/fabs.gif and /dev/null differ
diff --git a/images/function.gif b/images/function.gif
deleted file mode 100644
index c60f72277df2b0bd21e3591365e1c86649e1d317..0000000000000000000000000000000000000000
Binary files a/images/function.gif and /dev/null differ
diff --git a/images/getmask.gif b/images/getmask.gif
deleted file mode 100644
index f448b1aa22839cbcc903502dc9782d627f1f4fed..0000000000000000000000000000000000000000
Binary files a/images/getmask.gif and /dev/null differ
diff --git a/images/gohome.gif b/images/gohome.gif
deleted file mode 100644
index a6cfab3dd368225d45aa4b7703f5c9702ebe7204..0000000000000000000000000000000000000000
Binary files a/images/gohome.gif and /dev/null differ
diff --git a/images/greater.gif b/images/greater.gif
deleted file mode 100644
index d8bb1375c3999d3ab2935937e2268e6bba57b29e..0000000000000000000000000000000000000000
Binary files a/images/greater.gif and /dev/null differ
diff --git a/images/grower.gif b/images/grower.gif
deleted file mode 100644
index d33b2ea196f8381e34ea75397d77c4c615402ea1..0000000000000000000000000000000000000000
Binary files a/images/grower.gif and /dev/null differ
diff --git a/images/hand1.gif b/images/hand1.gif
deleted file mode 100644
index 382d9a103891750101be762c2ffbd6f582c15953..0000000000000000000000000000000000000000
Binary files a/images/hand1.gif and /dev/null differ
diff --git a/images/hand2.gif b/images/hand2.gif
deleted file mode 100644
index 7b3748df8e54c10db1f15681ff0d1d250f392bde..0000000000000000000000000000000000000000
Binary files a/images/hand2.gif and /dev/null differ
diff --git a/images/info.gif b/images/info.gif
deleted file mode 100644
index af52cdd9a8945497fdd34b02866f9f241a3817ca..0000000000000000000000000000000000000000
Binary files a/images/info.gif and /dev/null differ
diff --git a/images/info_20.gif b/images/info_20.gif
deleted file mode 100644
index 83bac5312fd6d536f4c0dca1e8908cee059695fe..0000000000000000000000000000000000000000
Binary files a/images/info_20.gif and /dev/null differ
diff --git a/images/info_menu.gif b/images/info_menu.gif
deleted file mode 100644
index 267fe64fe8e21e2ca31eb157d58209c688a6a2c7..0000000000000000000000000000000000000000
Binary files a/images/info_menu.gif and /dev/null differ
diff --git a/images/inpin_red.gif b/images/inpin_red.gif
deleted file mode 100644
index e85576af15695dacabd7bbd50e0a8b8ff6d9c69d..0000000000000000000000000000000000000000
Binary files a/images/inpin_red.gif and /dev/null differ
diff --git a/images/inverse.gif b/images/inverse.gif
deleted file mode 100644
index 16bed0d8cf7badb4e2e47d19827b782390b3db82..0000000000000000000000000000000000000000
Binary files a/images/inverse.gif and /dev/null differ
diff --git a/images/less.gif b/images/less.gif
deleted file mode 100644
index 7cdd74fca35422f87d5d919fcc8b65013587ef38..0000000000000000000000000000000000000000
Binary files a/images/less.gif and /dev/null differ
diff --git a/images/list_20.gif b/images/list_20.gif
deleted file mode 100644
index b26ab878afa72193d8a6031962d64e61348cbde6..0000000000000000000000000000000000000000
Binary files a/images/list_20.gif and /dev/null differ
diff --git a/images/lock.gif b/images/lock.gif
deleted file mode 100644
index b63a8667011bc7faebb2d9150447743e1eef3f9f..0000000000000000000000000000000000000000
Binary files a/images/lock.gif and /dev/null differ
diff --git a/images/log.gif b/images/log.gif
deleted file mode 100644
index d570f0b3b343001eb91c3b40427b27749bca3e6f..0000000000000000000000000000000000000000
Binary files a/images/log.gif and /dev/null differ
diff --git a/images/log_20.gif b/images/log_20.gif
deleted file mode 100644
index 8191b4af41ee5479589b794b504fd42df2a72cb1..0000000000000000000000000000000000000000
Binary files a/images/log_20.gif and /dev/null differ
diff --git a/images/log_menu.gif b/images/log_menu.gif
deleted file mode 100644
index 47b6856430d9059fca381849a3e8a106a27fb832..0000000000000000000000000000000000000000
Binary files a/images/log_menu.gif and /dev/null differ
diff --git a/images/mask.gif b/images/mask.gif
deleted file mode 100644
index 69ca60ba6bef2cf16f0f46087d43dce919b8b5ea..0000000000000000000000000000000000000000
Binary files a/images/mask.gif and /dev/null differ
diff --git a/images/mlog.gif b/images/mlog.gif
deleted file mode 100644
index e0ed411d2815ad7a00c78936baf6973588cde06f..0000000000000000000000000000000000000000
Binary files a/images/mlog.gif and /dev/null differ
diff --git a/images/mlog10.gif b/images/mlog10.gif
deleted file mode 100644
index 4c64ac7dd8b8fa4f883011dea48ac36055995a3c..0000000000000000000000000000000000000000
Binary files a/images/mlog10.gif and /dev/null differ
diff --git a/images/multiply.gif b/images/multiply.gif
deleted file mode 100644
index 7d5e63f5cff4ea0eea76d9200f15ba03424ae8e9..0000000000000000000000000000000000000000
Binary files a/images/multiply.gif and /dev/null differ
diff --git a/images/not.gif b/images/not.gif
deleted file mode 100644
index 0a3c3a0559a9295c67916e62cdb6b840bdc948ec..0000000000000000000000000000000000000000
Binary files a/images/not.gif and /dev/null differ
diff --git a/images/off.gif b/images/off.gif
deleted file mode 100644
index 3e53637a0a44d3707de2beb90a72c5862cf3c6b9..0000000000000000000000000000000000000000
Binary files a/images/off.gif and /dev/null differ
diff --git a/images/on.gif b/images/on.gif
deleted file mode 100644
index fb75f06ae5a5c1d3e0641e880bbf14075438dc68..0000000000000000000000000000000000000000
Binary files a/images/on.gif and /dev/null differ
diff --git a/images/open.gif b/images/open.gif
deleted file mode 100644
index 2aa70cdf0feb246156c1026936f026616c18d35a..0000000000000000000000000000000000000000
Binary files a/images/open.gif and /dev/null differ
diff --git a/images/opendap.gif b/images/opendap.gif
deleted file mode 100644
index 346345da29b30c14547776c5ee268a34086737a6..0000000000000000000000000000000000000000
Binary files a/images/opendap.gif and /dev/null differ
diff --git a/images/outpin_red.gif b/images/outpin_red.gif
deleted file mode 100644
index c1946afbb23d497ef1323bf22bbf317488708d7b..0000000000000000000000000000000000000000
Binary files a/images/outpin_red.gif and /dev/null differ
diff --git a/images/pan_down.gif b/images/pan_down.gif
deleted file mode 100644
index 4ff930a62c592377d8f6295c82e6599df436d983..0000000000000000000000000000000000000000
Binary files a/images/pan_down.gif and /dev/null differ
diff --git a/images/pan_left.gif b/images/pan_left.gif
deleted file mode 100644
index 2e84b439214f6495cbb600048c90e7f17426ccf3..0000000000000000000000000000000000000000
Binary files a/images/pan_left.gif and /dev/null differ
diff --git a/images/pan_right.gif b/images/pan_right.gif
deleted file mode 100644
index a6a3a27e3e553b9a3ea4e2f65664da76aa11a2b3..0000000000000000000000000000000000000000
Binary files a/images/pan_right.gif and /dev/null differ
diff --git a/images/pan_up.gif b/images/pan_up.gif
deleted file mode 100644
index ed12afba32c8bd20a43b7a0a982c97d32baedbdc..0000000000000000000000000000000000000000
Binary files a/images/pan_up.gif and /dev/null differ
diff --git a/images/player_end2.gif b/images/player_end2.gif
deleted file mode 100644
index ca90804acc832ae6f2e6bcc66044ad4798b52d04..0000000000000000000000000000000000000000
Binary files a/images/player_end2.gif and /dev/null differ
diff --git a/images/player_pause.gif b/images/player_pause.gif
deleted file mode 100644
index 9b88ec5ebd4bb5d5e476614f051c1391c546b891..0000000000000000000000000000000000000000
Binary files a/images/player_pause.gif and /dev/null differ
diff --git a/images/player_play.gif b/images/player_play.gif
deleted file mode 100644
index e610d842485044c82a7a04a2732d79f71a97612a..0000000000000000000000000000000000000000
Binary files a/images/player_play.gif and /dev/null differ
diff --git a/images/player_rev.gif b/images/player_rev.gif
deleted file mode 100644
index 4fdabd3115997603c836f4b7a227e3f2a9fe4f03..0000000000000000000000000000000000000000
Binary files a/images/player_rev.gif and /dev/null differ
diff --git a/images/player_start.gif b/images/player_start.gif
deleted file mode 100644
index add7c1cd99d223f47fe37d508ee106f50379708e..0000000000000000000000000000000000000000
Binary files a/images/player_start.gif and /dev/null differ
diff --git a/images/player_stop.gif b/images/player_stop.gif
deleted file mode 100644
index cbceec23c1bcb3c9a2b7568532b9fe77a9e8c0cb..0000000000000000000000000000000000000000
Binary files a/images/player_stop.gif and /dev/null differ
diff --git a/images/power.gif b/images/power.gif
deleted file mode 100644
index a998b721bf651c72ac0eba53ac09cb08d5d3d65b..0000000000000000000000000000000000000000
Binary files a/images/power.gif and /dev/null differ
diff --git a/images/pydebug b/images/pydebug
deleted file mode 100755
index 30262bfe20c0e9607651b94e65eb38883a76fa9f..0000000000000000000000000000000000000000
--- a/images/pydebug
+++ /dev/null
@@ -1,14 +0,0 @@
-#!/bin/csh -f
-unsetenv PYTHONPATH
-unsetenv PYTHONHOME
-set bindir = `dirname $0`
-set pyver=`${bindir}/python -c "import sys;print 'python'+sys.version[0:3],"`
-set libdirdir = `dirname ${bindir}`
-set libdir = ${libdirdir}/lib
-# setenv PYTHONPATH "${libdir}/${pyver}/site-packages/apps:${libdir}/${pyver}/site-packages/vtk"
-# setenv LD_LIBRARY_PATH "${libdir}:${libdir}/${pyver}/site-packages/vtk:/usr/local/lib:/usr/local/X11R6/lib:/usr/lib"
-if "$1" == "" then
-   echo "Usage: pydebug python_file"
-else
-   exec $bindir/python ${libdir}/${pyver}/site-packages/pydebug/pydebug.py $*
-endif
diff --git a/images/pythonenv b/images/pythonenv
deleted file mode 100755
index f19471f01f3ccc36ac3a1aee29e71a8f676f9870..0000000000000000000000000000000000000000
--- a/images/pythonenv
+++ /dev/null
@@ -1,14 +0,0 @@
-#!/bin/sh -f
-echo $0
-echo "This script shows you the environment variables relevant to running python."
-echo PYTHONPATH=${PYTHONPATH:-'not set'}
-echo PYTHONSTARTUP=${PYTHONSTARTUP:-'not set'}
-echo PYTHONHOME=${PYTHONHOME:-'not set'}
-echo LD_LIBRARY_PATH=${LD_LIBRARY_PATH:-'not set'}
-echo "You should not have PYTHONHOME set."
-echo "It is ok to have PYTHONPATH set to a directory of your own scripts."
-echo "It is ok to have PYTHONSTARTUP set to a script you want run"; \
-echo "when starting up Python interactively."
diff --git a/images/recycle.gif b/images/recycle.gif
deleted file mode 100644
index 704e054eb0b026feb9d8c4ce27107097db317b82..0000000000000000000000000000000000000000
Binary files a/images/recycle.gif and /dev/null differ
diff --git a/images/recycle_20.gif b/images/recycle_20.gif
deleted file mode 100644
index df7fa308a177a5ab0ef2ce62496e2dae5fd9a1d2..0000000000000000000000000000000000000000
Binary files a/images/recycle_20.gif and /dev/null differ
diff --git a/images/recycle_file.gif b/images/recycle_file.gif
deleted file mode 100644
index f1ab50cd08887ed354bad17c69af9f5ea4ba0647..0000000000000000000000000000000000000000
Binary files a/images/recycle_file.gif and /dev/null differ
diff --git a/images/recycle_menu.gif b/images/recycle_menu.gif
deleted file mode 100644
index 74489e27d5e1ac59f58e4067b33767eb6d57ed0b..0000000000000000000000000000000000000000
Binary files a/images/recycle_menu.gif and /dev/null differ
diff --git a/images/regrid.gif b/images/regrid.gif
deleted file mode 100644
index 9777ff3cbba3a9761b0ea02bed4a9ab332ba79b4..0000000000000000000000000000000000000000
Binary files a/images/regrid.gif and /dev/null differ
diff --git a/images/remove.gif b/images/remove.gif
deleted file mode 100644
index 12e81f86fd2dcc29472549b0e56c7fd07c2cebda..0000000000000000000000000000000000000000
Binary files a/images/remove.gif and /dev/null differ
diff --git a/images/save.gif b/images/save.gif
deleted file mode 100644
index bfbcf8058684519e1c10c716bff4ec5d9d50f9aa..0000000000000000000000000000000000000000
Binary files a/images/save.gif and /dev/null differ
diff --git a/images/save_20.gif b/images/save_20.gif
deleted file mode 100644
index b900e48013de1c544c8228d51b88c3a8ee6d6d66..0000000000000000000000000000000000000000
Binary files a/images/save_20.gif and /dev/null differ
diff --git a/images/save_file.gif b/images/save_file.gif
deleted file mode 100644
index dde653798fa9c4a739a960b7b29a80b2a92cdb90..0000000000000000000000000000000000000000
Binary files a/images/save_file.gif and /dev/null differ
diff --git a/images/save_menu.gif b/images/save_menu.gif
deleted file mode 100644
index 17fb7640b899e88a1d60d86264d970566c4b2ce5..0000000000000000000000000000000000000000
Binary files a/images/save_menu.gif and /dev/null differ
diff --git a/images/sin.gif b/images/sin.gif
deleted file mode 100644
index 27f1b4ff1c2ba5d9553c5ddb4b5e8750ffce6692..0000000000000000000000000000000000000000
Binary files a/images/sin.gif and /dev/null differ
diff --git a/images/sinh.gif b/images/sinh.gif
deleted file mode 100644
index 207e16b23986c5b4ba34567441c03684832e50e7..0000000000000000000000000000000000000000
Binary files a/images/sinh.gif and /dev/null differ
diff --git a/images/spk2scr.py b/images/spk2scr.py
deleted file mode 100755
index 03fa37519f366ff15e4aa2316d37c3514eaf58a7..0000000000000000000000000000000000000000
--- a/images/spk2scr.py
+++ /dev/null
@@ -1,117 +0,0 @@
-#!/usr/bin/env python
-import vcs,os
-
-version = '1.0'
-general_description = """
-    Reads in and converts Ferret (spk) colormap file to vcs colormap
-    If method is set to 'blend':
-      colors will be set using the ferret % value, and blending will be used in between
-      0% in ferret corresponds to index_start
-      100% in ferret corresponds to index_end
-    If method is set to 'contiguous':
-      colors will be set starting at index_start and assigned in order as found in the ferret (spk) file, no blending between colors
-    """
-
-def spk2vcs(file,cname=None,x=None,index_start=16,index_end=239,method='blend',verbose=False):
-    """ %s
-    Usage:
-    cmap = spk2vcs(file,cname=None,x=None,index_start=16,index_end=239,method='blend',verbose=False)
-    Input:
-    file                     : Ferret (spk) colormap file
-    cname                    : VCS output colormap name, if None, uses ferret file name
-    x                        : vcs canvas, if None then a vcs canvas instance will be created
-    index_start              : vcs color index corresponding to ferret 0%%, default is 16
-    index_end                : vcs color index corresponding to ferret 100%%, default is 239
-    method                   : 'blend' or 'contiguous', default is 'blend'
-    Output:
-    cmap                     : vcs colormap object, with contiguous colors set from index_start if method='contiguous',
-                               or spread from index_start to index_end if method is 'blend'
-    """ 
-
-    f=open(file)
-    ln=f.readlines()
-    # Treat colormap name
-    if cname is None:
-        cname = '.'.join(os.path.split(file)[-1].split('.')[:-1])
-        if verbose: print 'Colormap name:',cname
-
-    if x is None:
-        x=vcs.init()
-    cmap=x.createcolormap(cname)
-    x.setcolormap(cmap.name)
-    ncolors = 0
-    last_index = index_start
-    if verbose: print 'Method:',method
-    for l in ln:
-        sp=l.split()
-        if len(sp)!=4: # Is it a line with 4 values (p,r,g,b)?
-            continue
-        p,r,g,b=sp
-        try: # Are the 4 values float?
-            p=float(p)
-            r=float(r)
-            g=float(g)
-            b=float(b)
-        except:
-            continue
-        if method == 'contiguous':
-            x.setcolorcell(index_start + ncolors, int(r), int(g), int(b))
-            if verbose: print 'Setting cell %s to: %s, %s, %s' % (index_start + ncolors, int(r), int(g), int(b))
-            cmap=x.getcolormap(cmap.name)
-            ncolors+=1
-        else:
-            index = index_start + int(p*(index_end-index_start)/100.)
-            x.setcolorcell( index, int(r), int(g), int(b))
-            cmap=x.getcolormap(cmap.name)
-            if verbose: print 'Setting cell %s to: %s, %s, %s' % (index, int(r), int(g), int(b))
-            dr = cmap.index[index][0] - cmap.index[last_index][0]
-            dg = cmap.index[index][1] - cmap.index[last_index][1]
-            db = cmap.index[index][2] - cmap.index[last_index][2]
-            for indx in range(last_index+1,index):
-                p = float(indx-last_index)/float(index-last_index)
-                r = cmap.index[last_index][0]+int(p*dr)
-                g = cmap.index[last_index][1]+int(p*dg)
-                b = cmap.index[last_index][2]+int(p*db)
-                x.setcolorcell(indx , r, g, b)
-                if verbose: print '\t Sub-setting cell %s to: %s, %s, %s' % (indx , r, g, b)
-                cmap=x.getcolormap(cmap.name)
-            last_index = index
-    return cmap
-setattr(spk2vcs,'__doc__',spk2vcs.__doc__ %  general_description)
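-
-# Minimal usage sketch (assumes an spk file such as the default 'pal1.spk' exists):
-#   import vcs
-#   x = vcs.init()
-#   cmap = spk2vcs('pal1.spk', cname='my_pal', x=x, method='blend')
-#   cmap.script('my_pal.scr')  # persist the converted colormap, as __main__ does below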
-
-if __name__=='__main__':
-    import optparse
-    op=optparse.OptionParser(usage="%%prog [options]\n%s" % general_description,version="%%prog %s" % version)
-    op.add_option("--file",dest='file',help="Ferret (spk) colormap file to convert, [default: %default]",default="pal1.spk")
-    op.add_option("--name",dest="name",help="Name of the returned vcs colormap, [default: uses ferret (spk) file name]",default='default')
-    op.add_option("--out",dest="out",help="Name of the returned vcs script file, [default: file.scr]",default='default')
-    op.add_option("--index_start",dest="index_start",type='int',help='start index for mapping of ferret colors into vcs colormap, [default: %default]',default=16)
-    op.add_option("--index_end",dest="index_end",type='int',help='end index for mapping of ferret colors into vcs colormap, [default: %default]',default=239)
-    op.add_option("--method",dest="method",help='method for mapping of ferret colors into vcs colormap (blend or contiguous), [default: %default]',default='blend')
-    op.add_option("--blend",dest="blend",action='store_true',help='end index for mapping of ferret colors into vcs colormap, overrides --method option',default=True)
-    op.add_option("--contiguous",dest="blend",action='store_false',help='end index for mapping of ferret colors into vcs colormap, overrides --method option',default=True)
-    op.add_option("--verbose",dest="verbose",action='store_true',help='Enable verbose screen output while converting colorcells, [default: %default]',default=False)
-    
-    op,args = op.parse_args()
-
-    if op.method in [ 'contiguous','blend']:
-        method = op.method
-    else:
-        op.error("options method can ONLY be either blend or contiguous")
-
-    if op.blend is True:
-        method = 'blend'
-    elif op.blend is False:
-        method = 'contiguous'
-
-    if op.name == 'default':
-        cname = None
-    else:
-        cname = op.name
-
-    cmap  = spk2vcs(op.file,index_start=op.index_start,index_end=op.index_end,method=method,cname=cname,verbose=op.verbose)
-
-    if op.out == 'default':
-        oname = '.'.join(os.path.split(op.file)[-1].split('.')[:-1])+'.scr'
-    else:
-        oname = op.out
-    cmap.script(oname)
-    print 'Done, colormap converted to VCS using "%s" method from index %s to index %s\nStored in file: %s' % (method,op.index_start,op.index_end,oname)
diff --git a/images/splash.gif b/images/splash.gif
deleted file mode 100755
index 3fb4ad9ae6145b376ee7e4bd0340eb2ff2a02675..0000000000000000000000000000000000000000
Binary files a/images/splash.gif and /dev/null differ
diff --git a/images/sqrt.gif b/images/sqrt.gif
deleted file mode 100644
index 5bad811d7db4aae7ab61a37385409ba5a4b55ec1..0000000000000000000000000000000000000000
Binary files a/images/sqrt.gif and /dev/null differ
diff --git a/images/std.gif b/images/std.gif
deleted file mode 100644
index 61e29c4ad7cfc08d5387d5087da8491b854af16a..0000000000000000000000000000000000000000
Binary files a/images/std.gif and /dev/null differ
diff --git a/images/subtract.gif b/images/subtract.gif
deleted file mode 100644
index ece53af6fa0268426160ec0b381484f94e5d1f26..0000000000000000000000000000000000000000
Binary files a/images/subtract.gif and /dev/null differ
diff --git a/images/tan.gif b/images/tan.gif
deleted file mode 100644
index bba5910b4c51ec47b7b3bd8f96ec6c0f9c71d662..0000000000000000000000000000000000000000
Binary files a/images/tan.gif and /dev/null differ
diff --git a/images/tanh.gif b/images/tanh.gif
deleted file mode 100644
index 60753ac6abb4f1a6eb81dc3356a6d7aad547e6ee..0000000000000000000000000000000000000000
Binary files a/images/tanh.gif and /dev/null differ
diff --git a/images/templator b/images/templator
deleted file mode 100755
index a8c9f2e9bd6555a55fe3f8e4e95a317addf6cafc..0000000000000000000000000000000000000000
--- a/images/templator
+++ /dev/null
@@ -1,10 +0,0 @@
-#!/bin/sh -f
-unset PYTHONHOME
-opt=$1
-bindir=`dirname $0`
-if (test "--help" = "$1") then
-   echo 'Usage: templator [template_name]'; exit 0
-fi
-set path="${bindir}:$path" #in case of respawns, get our python
-exec ${bindir}/python -O -c "import vcs, gui_support;vcs.templateeditorgui.create(template_name='${opt}');gui_support.root().mainloop()"
-
diff --git a/images/tg2_20.gif b/images/tg2_20.gif
deleted file mode 100644
index c2390d374da6e88429f164dd27b3dadeef2aa47c..0000000000000000000000000000000000000000
Binary files a/images/tg2_20.gif and /dev/null differ
diff --git a/images/tg_20.gif b/images/tg_20.gif
deleted file mode 100644
index b26ab878afa72193d8a6031962d64e61348cbde6..0000000000000000000000000000000000000000
Binary files a/images/tg_20.gif and /dev/null differ
diff --git a/images/tg_menu.gif b/images/tg_menu.gif
deleted file mode 100644
index 7e08f4ad83538d6e03d87e4929a60061395b8ca8..0000000000000000000000000000000000000000
Binary files a/images/tg_menu.gif and /dev/null differ
diff --git a/images/tiltedpin_red.gif b/images/tiltedpin_red.gif
deleted file mode 100644
index db91095014e706c3169ad1c981ab32a3b039b239..0000000000000000000000000000000000000000
Binary files a/images/tiltedpin_red.gif and /dev/null differ
diff --git a/images/toggle_menu.gif b/images/toggle_menu.gif
deleted file mode 100644
index 3a2e664df08f4a931d7a7e049acb822391220b18..0000000000000000000000000000000000000000
Binary files a/images/toggle_menu.gif and /dev/null differ
diff --git a/images/trashcan_empty.gif b/images/trashcan_empty.gif
deleted file mode 100644
index cbd89f784853939528324baf11f1fd12c9080723..0000000000000000000000000000000000000000
Binary files a/images/trashcan_empty.gif and /dev/null differ
diff --git a/images/trashcan_empty_20.gif b/images/trashcan_empty_20.gif
deleted file mode 100644
index ecb0c3ed762ed2fea827a0bdadcc58c8d63511aa..0000000000000000000000000000000000000000
Binary files a/images/trashcan_empty_20.gif and /dev/null differ
diff --git a/images/trashcan_full.gif b/images/trashcan_full.gif
deleted file mode 100644
index 39acb09dc13cb93f6d1907820d2cbc999eb5a29a..0000000000000000000000000000000000000000
Binary files a/images/trashcan_full.gif and /dev/null differ
diff --git a/images/trashcan_full_20.gif b/images/trashcan_full_20.gif
deleted file mode 100644
index 97ed55ba861960dda5ff3962222823ca8c076fac..0000000000000000000000000000000000000000
Binary files a/images/trashcan_full_20.gif and /dev/null differ
diff --git a/images/trashcan_menu.gif b/images/trashcan_menu.gif
deleted file mode 100644
index 1b8b1af82d7a3475df0699284426825fad13f4d5..0000000000000000000000000000000000000000
Binary files a/images/trashcan_menu.gif and /dev/null differ
diff --git a/images/unlock.gif b/images/unlock.gif
deleted file mode 100644
index da0b2f547f6573fe644ea0fbe233fd7e37bc89c0..0000000000000000000000000000000000000000
Binary files a/images/unlock.gif and /dev/null differ
diff --git a/images/vcs2py.py b/images/vcs2py.py
deleted file mode 100755
index bb06fc09fcb7bbf7e9f0ac697f3e4f797a0a8abf..0000000000000000000000000000000000000000
--- a/images/vcs2py.py
+++ /dev/null
@@ -1,367 +0,0 @@
-#!/usr/bin/env python
-
-""" This script converts old vcs scripts to python scripts
-This is version 0. graphic method and template won't be converted
-Therefore old script still needed around in order to load all graphic methods needed
-
-Not implemented yet, to do:
-xname, xunits, etc....
-Transform, Logical mask
-"""
-version='0.2'
-import sys
-import vcs
-import cdms
-
-## initialize some stats
-warnings=0
-vcscmd=0
-arrays=0
-arrays_from_file=0
-plots=0
-active_plots=0
-unimplemented=0
-vcs_objects=0
-## Continents overlaying (default none)
-overlay_continents=''
-
-## Determine input script
-src=sys.argv[1]
-
-## Generate output name
-outnm=src[:-4]+'.py'
-
-## Generate output name for graphic methods, templates, etc...
-outnm_vcs=src[:-4]+'_vcs.scr'
-f2=open(outnm_vcs,'w')
-
-# open input script file
-fi=open(src,'r')
-
-## Opens output script file
-f=open(outnm,'w')
-f.write('#!/usr/bin/env python\nimport vcs\nimport cdms\nimport MV\nx=vcs.init()\n\n')
-f.write('"""Python script autogenerated using vcs2py version '+version+'\n')
-f.write('Input VCS script: '+src+'\n"""\n')
-f.write('## First load all the necessary template and graphic methods from the old script\nx.scriptrun(\''+outnm_vcs+'\')\n')
-f.write("## Individual python code for individual vcs object can be generated by loading the object and saving it to a file\n## e.g: t=x.getboxfill('default')\n## x.scriptobject(t,'myfile.py')\n\n")
-
-## Opens file for graphic methods rewriting
-
-## Ok now let's loop through all lines and figure out commands
-ln=fi.readlines()
-n=len(ln)
-
-def extract(instring,beg,end=','):
-    """ Extract part of a string between 2 characters def, returns None if not existing
-    Usage: val = extract(instring,beg,end=',')
-    """
-    try:
-        sp=instring.split(beg)[1]
-        sp=sp.split(end)[0]
-        if sp[-1]==instring[-1]:
-            sp=sp[:-1]
-    except:
-        sp=None
-    return sp
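-# e.g. extract('A_tas(File="foo.nc",Name="tas")','File=') returns '"foo.nc"',
-# while extract('A_tas(Name="tas")','Title=') returns None (delimiter absent).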
-for i in range(n):
-    l=ln[i]
-    #print l
-    iprint=0
-    if l[:4]=='Page':
-        vcscmd+=1
-        val=l[5:].split(')')[0]
-        f.write('x.'+val+'()\n\n')
-    elif l.split('_')[0] in ['L','Tt','To','Tl','Tf','Tm','Th','C','P',
-                                    'Gi','Gfb','Gfi','Gfo','Go','GSp','Gv','GXY','GXy','GYx']:
-        # First reconstruct the full name
-        nbracket=l.count('(')
-        vcs_objects+=1
-        j=1
-        f2.write(ln[i])
-        nbracket-=l.count(')')
-        while nbracket>0:
-            f2.write(ln[i+j])
-            nbracket+=ln[i+j].count('(')
-            nbracket-=ln[i+j].count(')')
-            j+=1
-            
-    elif l[:5]=='Sleep':
-        vcscmd+=1
-        val=l[6:].split(')')[0]
-        f.write('import time\ntime.sleep('+val+')\n\n')
-    elif l[:4]=='Over':
-        vcscmd+=1
-        overlay_continents=',continents='
-        cont=l[19:].split(')')[0]  # do not reuse n: it still counts the script lines
-        overlay_continents+=cont
-    elif l[:3].lower()=='cgm':
-        vcscmd+=1
-        args=l[4:].split(')')[0] # get the arguments
-        sp=args.split(',')
-        cgmfnm=sp[0]
-        if len(sp)>1:
-            app="'"+sp[1][0]+"'"
-        else:
-            app="'a'"
-        f.write("x.cgm('"+cgmfnm+"',"+app+")\n\n")
-    elif l[:3].lower()=='run':
-        vcscmd+=1
-        args=l[4:].split(')')[0] # get the arguments
-        sp=args.split(',')
-        scrfnm=sp[0]
-        f.write("## Warning the following will only load the templates/graphic methods\n")
-        f.write("## To excute commands convert script to file and uncoment the following line\n")
-        warnings+=1
-        print 'Warning: Run script will not execute any command; you need to convert it first and uncomment the line in the python script'
-        pyfnm=scrfnm.replace('.scr','.py')
-        f.write("## execfile('"+pyfnm+"')\n")
-        f.write("x.scriptrun('"+scrfnm+"')\n\n")
-    elif l[:6].lower()=='raster':
-        vcscmd+=1
-        args=l[7:].split(')')[0] # get the arguments
-        sp=args.split(',')
-        cgmfnm=sp[0]
-        if len(sp)>1:
-            app="'"+sp[1][0]+"'"
-        else:
-            app="'a'"
-        f.write("x.raster('"+cgmfnm+"',"+app+")\n\n")
-    elif l[:3].lower() in['drs','hdf']:
-        vcscmd+=1
-        warnings+=1
-        args=l[4:].split(')')[0] # get the arguments
-        sp=args.split(',')
-        ncfnm=sp[0]
-        ncfnm=ncfnm.replace('.dic','.nc')
-        ncfnm=ncfnm.replace('.hdf','.nc')
-        if len(sp)>2:
-            app=sp[2][0]
-            if app=='r':app="'w'"
-            if app=='a':app="'r+'"
-        else:
-            app="'w'"
-        array=sp[1]
-        print 'WARNING: Output file converted from '+l[:3]+' to NetCDF'
-        f.write("f=cdms.open('"+ncfnm+"',"+app+")\n")
-        f.write("f.write("+array+","+app+")\n")
-        f.write('f.close()\n\n')
-    elif l[:6].lower()=='netcdf':
-        vcscmd+=1
-        args=l[7:].split(')')[0] # get the arguments
-        sp=args.split(',')
-        ncfnm=sp[0]
-        if len(sp)>2:
-            app=sp[2][0]
-            if app=='r':app="'w'"
-            if app=='a':app="'r+'"
-        else:
-            app="'w'"
-        array=sp[1]
-        f.write("f=cdms.open('"+ncfnm+"',"+app+")\n")
-        f.write("f.write("+array+","+app+")\n")
-        f.write('f.close()\n\n')
-    elif l[:5].lower()=='clear':
-        vcscmd+=1
-        f.write('x.clear()\n\n')
-    elif l[:5].lower()=='color':
-        vcscmd+=1
-        cmap=l[6:].split(')')[0]
-        f.write("x.setcolormap('"+cmap+"')\n\n")
-    elif l[:6].lower()=='canvas':
-        vcscmd+=1
-        if l[7:-1]=='open':
-            f.write('x.open()\n\n')
-        elif l[7:-1]=='close':
-            f.write('x.close()\n\n')
-    elif l[:2]=='A_':
-        arrays+=1
-        # Acquiring Array data
-        # First reconstruct the full name
-        j=1
-        while l[-2]!=')' and l[-1]!=')':
-            l=l[:-1]+ln[i+j]
-            j+=1
-        l=l.replace('\n','')
-        nm=extract(l,'A_','(')
-        pnm=nm.replace('.','_') # . are not acceptable in python names
-        if pnm!=nm:
-            # Now replace in every over possible lines !
-            for j in range(i,n):
-                ln[j]=ln[j].replace(nm,pnm)
-        fnm=extract(l,'File=')
-        src=extract(l,'Source=')
-        vr=extract(l,'Name=')
-        tit=extract(l,'Title=')
-        units=extract(l,'Units=')
-        xnm=extract(l,'XName=')
-        xfirst=extract(l,'xfirst=')
-        xlast=extract(l,'xlast=')
-        ynm=extract(l,'YName=')
-        yfirst=extract(l,'yfirst=')
-        ylast=extract(l,'ylast=')
-        znm=extract(l,'ZName=')
-        zfirst=extract(l,'zfirst=')
-        zlast=extract(l,'zlast=')
-        tnm=extract(l,'TName=')
-        tfirst=extract(l,'tfirst=')
-        tlast=extract(l,'tlast=')
-        func=extract(l,'Function="','"')
-        cmd=''
-        
-        if not fnm is None:
-            arrays_from_file+=1
-            cmd+='f = cdms.open('+fnm+')\n'
-            cmd+=pnm+' = f('+vr
-            if fnm[-5:-1]=='.dic':
-                if not tnm is None: tnm=tnm[:-1]+'_'+vr[1:]
-                if not znm is None: znm=znm[:-1]+'_'+vr[1:]
-                if not ynm is None: ynm=ynm[:-1]+'_'+vr[1:]
-                if not xnm is None: xnm=xnm[:-1]+'_'+vr[1:]
-
-        elif not func is None:
-            # First of all treats the special commands (mean and sqrt)
-            # Mean ?
-##             if func[:-1]!=')':
-##                 func=func+')'
-            imean=func.find('mean(')
-            while imean!=-1 :
-                tmp=func[imean:]
-                tmp=tmp.replace('mean(','cdutil.averager(',1)
-                tmp=tmp.split(',')
-                tmp2=tmp[1]
-                fpar=tmp2.find('\'')
-                lpar=tmp2[fpar+1].find('\'')
-                tmp3=tmp2[fpar+1:lpar].lower()
-                if tmp3=='time':
-                    tmp3="axis='t')"
-                elif tmp3=='longitude':
-                    tmp3="axis='x')"
-                elif tmp3=='latitude':
-                    tmp3="axis='y')"
-                elif tmp3=='level':
-                    tmp3="axis='z')"
-                else:
-                    tmp3="axis='("+tmp2[fpar+1:lpar-1]+")'"+tmp2[lpar:]
-                tmp[1]=tmp3
-                tmp=','.join(tmp)
-                func=func[:imean]+tmp
-                imean=func.find('mean(')
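-            # e.g. a hypothetical Function="mean(A_tas,'Time')" leaves this loop as
-            # "cdutil.averager(A_tas,axis='t')"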
-            isqrt=func.find('sqrt(')
-            while isqrt!=-1:
-                warnings+=1
-                print 'WARNING FOR ARRAY:'+pnm+'\nsqrt FUNCTION FOUND YOU NEED TO REPLACE AXIS NAME WITH CORRECT VALUE !'
-                tmp=func[isqrt:]
-                tmp=tmp.replace('sqrt(','MV.xxxx(',1)
-                tmp=tmp.split(',')
-                if len(tmp)>1:
-                    tmp2=tmp[1]
-                    fpar=tmp2.find('\'')
-                    lpar=tmp2.find('\'',fpar+1)
-                    tmp3="axis='("+tmp2[fpar+1:lpar].lower()+")'"
-                    tmp[1]=tmp3
-                else:
-                    tmp[0]+=')'
-                tmp=','.join(tmp)
-                func=func[:isqrt]+tmp
-                isqrt=func.find('sqrt(')
-            func=func.replace('MV.xxxx','MV.sqrt')
-            cmd+=pnm+' = '+func+'\n'+pnm+' = '+pnm+'('
-        else:
-            raise RuntimeError('Array '+nm+' comes neither from a file nor from a function!')
-        # Now does the dimensions needed
-        order=''
-        if not tnm is None:
-            order+='('+tnm[1:-1]+')'
-            if not tfirst is None:
-                tcmd=tnm[1:-1]+'=('+tfirst+','+tlast+')'
-                if cmd[-1]!='(':
-                    cmd+=','+tcmd
-                else:
-                    cmd+=tcmd
-        if not znm is None:
-            order+='('+znm[1:-1]+')'
-            if not zfirst is None:
-                zcmd=znm[1:-1]+'=('+zfirst+','+zlast+')'
-                if cmd[-1]!='(':
-                    cmd+=','+zcmd
-                else:
-                    cmd+=zcmd
-        if not ynm is None:
-            order+='('+ynm[1:-1]+')'
-            if not yfirst is None:
-                ycmd=ynm[1:-1]+'=('+yfirst+','+ylast+')'
-                if cmd[-1]!='(':
-                    cmd+=','+ycmd
-                else:
-                    cmd+=ycmd
-        if not xnm is None:
-            order+='('+xnm[1:-1]+')'
-            if not xfirst is None:
-                xcmd=xnm[1:-1]+'=('+xfirst+','+xlast+')'
-                if cmd[-1]!='(':
-                    cmd+=','+xcmd
-                else:
-                    cmd+=xcmd
-        if order!='':
-            cmd+=",order='..."+order+"'"
-        cmd+=')\n'
-        if not fnm is None:
-            cmd+='f.close()\n'
-        if not src is None:
-            cmd+=pnm+'.source = '+src+'\n'
-        if not tit is None:
-            cmd+=pnm+'.title = '+tit+'\n'
-        if not units is None:
-            cmd+=pnm+'.units = '+units+'\n'
-        
-        # Now does the attributes that are overwrittable
-        for att in ['source','name','units','crdate','crtime',
-                    'comment#1','comment#2','comment#3','comment#4']:
-            val=extract(l,att+'="','"')
-            Att=att.replace('#','')
-            if not val is None:
-                cmd+=pnm+'.'+Att+' = "'+val+'"\n'
-        cmd+='\n'
-        cmd=cmd.replace('"',"'")
-        cmd=cmd.replace('(,',"(")
-        f.write(cmd)
-    elif l[:2]=='D_':
-        plots+=1
-        # Plotting data
-        # First reconstruct the full string
-        j=1
-        while l[-2]!=')' and l[-1]!=')':
-            l=l[:-1]+ln[i+j]
-            j+=1
-        l=l.replace('\n','')
-        off=extract(l,'off=',',')
-        if int(off)==0: # Ok it's not off, let's draw it
-            cmd=''
-            active_plots+=1
-        else:
-            cmd='## Next line commented, display was "off"\n## '
-        type=extract(l,'type='    ,',')
-        if type is None: type = 'boxfill'
-        tmpl=extract(l,'template=',',')
-        if tmpl is None: tmpl='default'
-        mthd=extract(l,'graph='   ,',')
-        if mthd is None: mthd='default'
-        a   =extract(l,'a='       ,',')
-        b   =extract(l,'b='       ,',')
-        cmd+='x.plot('+a+', '
-        if not b is None:
-            cmd+=b+' ,'
-        cmd+="'"+tmpl+"', '"+type+"', '"+mthd+"'"+overlay_continents+")\n\n"
-        f.write(cmd)
-f.close()
-print 'Successfully converted:',src
-print 'Processed:'
-print 'VCS Commands:',vcscmd
-
-print 'Arrays:',arrays,':',arrays_from_file,'from file and',arrays-arrays_from_file,'computed'
-print 'Plots:',plots
-print 'Active plots:',active_plots
-print 'Warnings:',warnings
-print 'VCS OBJECTS (templates, graphic methods, etc..):',vcs_objects
-
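
The deleted script above is the tail of a legacy-VCS-script converter: it rewrites
mean(...) calls into cdutil.averager(...) with single-letter axis keywords, flags
sqrt(...) calls for manual review, and finally emits a keyword-sliced read such as
pnm = f(vr, time=(tfirst,tlast), order='...(time)'). A minimal sketch of the
axis-name mapping it performs, assuming the same time/longitude/latitude/level
vocabulary (to_axis_kwarg is a hypothetical helper name, not part of the converter):

    # Hypothetical sketch of the axis-name mapping performed above.
    AXIS_LETTERS = {'time': 't', 'longitude': 'x', 'latitude': 'y', 'level': 'z'}

    def to_axis_kwarg(name):
        # Known dimensions become single-letter axis codes; anything else
        # is passed through by name, mirroring the fallback branch above.
        letter = AXIS_LETTERS.get(name.lower())
        if letter is not None:
            return "axis='%s'" % letter
        return "axis='(%s)'" % name

    # to_axis_kwarg('Time') -> "axis='t'"
    # to_axis_kwarg('plev') -> "axis='(plev)'"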
diff --git a/images/vcs_icon.xbm b/images/vcs_icon.xbm
deleted file mode 100644
index 912510e7786d0a4f6da3003612c6cabea7ec204e..0000000000000000000000000000000000000000
--- a/images/vcs_icon.xbm
+++ /dev/null
@@ -1,566 +0,0 @@
-#define splash_width 492
-#define splash_height 136
-static char splash_bits[] = {
- /* 492x136 splash-screen bitmap: pixel data elided */
- 0x00,0xf0};
diff --git a/images/viewmag+.gif b/images/viewmag+.gif
deleted file mode 100644
index 6daba5233321e77718e8708feb51c4fc5fb51af0..0000000000000000000000000000000000000000
Binary files a/images/viewmag+.gif and /dev/null differ
diff --git a/images/viewmag-.gif b/images/viewmag-.gif
deleted file mode 100644
index e2dc98fa2744235c6eaf67e880b70cc4d4deaf06..0000000000000000000000000000000000000000
Binary files a/images/viewmag-.gif and /dev/null differ
diff --git a/images/viewmag-.png b/images/viewmag-.png
deleted file mode 100644
index 8108ecd7b08e2e49f9c421ac3dcdbe18c28e94ec..0000000000000000000000000000000000000000
Binary files a/images/viewmag-.png and /dev/null differ
diff --git a/installation/DAP.py b/installation/DAP.py
deleted file mode 100644
index ca1edb37860770cb6f702c1a4704a3aa3ce26456..0000000000000000000000000000000000000000
--- a/installation/DAP.py
+++ /dev/null
@@ -1,29 +0,0 @@
-# Edit this configuration file before building.
-# Always build with --force after changing a configuration.
-# You do not need to rebuild Python itself.
-#print 'DAP'
-
-import os
-import sys
-current_dir = os.path.dirname(__file__)
-src_dir = os.path.join(current_dir, '..')
-installation_script_dir = os.path.join(src_dir, 'installation')
-
-sys.path.append(src_dir)
-sys.path.append(installation_script_dir)
-
-CDMS_INCLUDE_DAP='yes'
-CDMS_DAP_DIR=""
-try:
-    import cdat_info
-    externals = cdat_info.externals
-except (ImportError, AttributeError):
-    externals = os.path.join(sys.prefix,"Externals")
-externals = os.environ.get("EXTERNALS",externals)
-
-for o in sys.argv[1:]:
-    pth = o.lower().split('with-opendap=')
-    if len(pth)>1:
-        CDMS_DAP_DIR=pth[1]
-        
-if CDMS_DAP_DIR == "":
-    CDMS_DAP_DIR=os.path.join(externals,'OpenDAP')
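
installation/DAP.py above and the near-identical HDF.py that follows share one
idiom: scan sys.argv for a 'with-<package>=' option and fall back to a directory
under the Externals tree. A standalone sketch of that idiom (find_external is a
hypothetical name, not part of the installer):

    import os, sys

    def find_external(opt, subdir, externals):
        # opt is e.g. 'with-opendap='; as in the loops above, the last
        # matching command-line argument wins.
        path = ''
        for arg in sys.argv[1:]:
            parts = arg.lower().split(opt)
            if len(parts) > 1:
                path = parts[1]
        return path or os.path.join(externals, subdir)

    # e.g. find_external('with-opendap=', 'OpenDAP', externals)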
diff --git a/installation/HDF.py b/installation/HDF.py
deleted file mode 100644
index 23830d08e415cd5816a0a0308de32d0823bf3bcf..0000000000000000000000000000000000000000
--- a/installation/HDF.py
+++ /dev/null
@@ -1,26 +0,0 @@
-# To build on Linux with HDF:
-# express_install /usr/local/cdat/somewhere --force --configuration installation/hdf.py
-import os
-import sys
-current_dir = os.path.dirname(__file__)
-src_dir = os.path.join(current_dir, '..')
-installation_script_dir = os.path.join(src_dir, 'installation')
-
-sys.path.append(src_dir)
-sys.path.append(installation_script_dir)
-
-CDMS_INCLUDE_HDF='yes'
-CDMS_HDF_DIR=""
-try:
-    import cdat_info
-    externals = cdat_info.externals
-except (ImportError, AttributeError):
-    externals = os.path.join(sys.prefix,"Externals")
-externals = os.environ.get("EXTERNALS",externals)
-
-for o in sys.argv[1:]:
-    pth = o.lower().split('with-hdf4=')
-    if len(pth)>1:
-        CDMS_HDF_DIR=pth[1]
-
-if CDMS_HDF_DIR == "":
-    CDMS_HDF_DIR=os.path.join(externals,'HDF')
diff --git a/installation/cdmsonly.py b/installation/cdmsonly.py
deleted file mode 100644
index 90ea1182485315b04ac1dfc526ce786afadd119b..0000000000000000000000000000000000000000
--- a/installation/cdmsonly.py
+++ /dev/null
@@ -1,16 +0,0 @@
-packages = [
-            "Packages/AutoAPI",
-            "Packages/cdtime",
-            "Packages/regrid2",
-#            "Packages/regrid",
-            "Packages/Properties",
-            "Packages/kinds",
-            "Packages/cdms2",
-            "Packages/genutil",
-            "Packages/cdutil",
-            "Packages/unidata",
-#            "Packages/cdms",
-            "Packages/ncml",
-            "Packages/esg",
-            "Packages/distarray",
-           ]
diff --git a/installation/contrib.py b/installation/contrib.py
deleted file mode 100644
index 606aaf5fdf35fa18fb6a73a157cf58cd5efd49ae..0000000000000000000000000000000000000000
--- a/installation/contrib.py
+++ /dev/null
@@ -1,73 +0,0 @@
-import os
-dostandard = force
-## try:
-##     import Numeric, cdms
-## except ImportError:
-##     dostandard = 1
-dostandard = 1
-import sys
-if 'clist' not in locals():
-    clist=[]
-## Format is [path,description,licence_file]
-Clist = [
-#    ['contrib/Sphinx','sphinx documentation builder','GNU'],
-##     ['contrib/zope-interface','zope interface','GNU'],
-#    ['contrib/Twisted','network computing tools','GNU'],
-#    ['contrib/Foolscap','RPC protocol for Python+Twisted','GNU'],
-#    ['contrib/ipython','an Enhanced Python Shell','GNU'],
-#    ['contrib/scipy','Scientific tools for Python (core only)','GNU'],
-    ['contrib/SP','A collection of Python modules that are useful for scientific computing.','LICENSE'],
-    ['contrib/cssgrid','An interpolation package for random data on the surface of a sphere based on the work of Robert Renka. cssgrid uses cubic splines to calculate its interpolation function.',''],
-    ['contrib/lmoments','56 routines for statistical analysis using L-moments','UC'],
-    ['contrib/ort','Reads in Oort data files','UC'],
-#    ['contrib/spherepack','A collection of programs for computing certain common differential operators and performing related manipulations on a sphere.',''],
-    ['contrib/asciidata','Reads in ASCII files with the ability to specify tab or comma or space delimited fields','Lib/ASV.py'],
-    ['contrib/eof','Calculates Empirical Orthogonal Functions of either one variable or two variables jointly','UC'],
-    ['contrib/trends','Computes variance estimate taking auto-correlation into account.',''],
-    ['contrib/binaryio','Handles binary or unformatted data',''],
-    ['contrib/regridpack','A collection of programs for linear or cubic interpolation in one, two, three or four dimensions.',''],
-    ['contrib/shgrid','An interpolation package for random data in 3-space based on the work of Robert Renka. shgrid uses a modified Shepard\'s algorithm to calculate its interpolation function',''],
-    ['contrib/dsgrid','A three-dimensional random data interpolator based on a simple inverse distance weighting algorithm.',''],
-    ['contrib/pyclimate','Provides functions to perform some simple IO operations, operations with COARDS-compliant netCDF files, EOF analysis, SVD and CCA analysis of coupled data sets, some linear digital filters, kernel based probability density function estimation and access to DCDFLIB.C library from Python.','GNU'],
-    ['contrib/ComparisonStatistics','Calculates statistics (e.g., correlations and RMS differences) that quantify differences between two datasets. Allows for elaborate masking and regridding operations','UC'],
-    ['contrib/IaGraph','Package for Quick Interactive Graphing','GNU'],
-    ['contrib/MSU','Package to compute Equivalent MSU Temperatures','UC'],
-    ['contrib/EzTemplate','Package to generate VCS templates easily','GNU'],
-    ['contrib/ZonalMeans','Package to compute zonal means on any grid (requires f90 compiler)','GNU'],
-    ['contrib/HDF5Tools','Package to read HDF5 files into CDAT (requires h5dump binary utility)','GNU'],
-# following is now built via externals
-#    ['contrib/eof2','',''],
-#    ['contrib/eofs','',''],
-#    ['contrib/windspharm','','GNU'],
-]
-
-# natgrid has illegal C comments but gcc lets them through...
-# we need to fix it.
-NCARG_ROOT = os.environ.get('NCARG_ROOT')
-NCARG_COLORMAP_PATH = os.environ.get('NCARG_COLORMAP_PATH')
-if NCARG_COLORMAP_PATH or NCARG_ROOT :
-    Clist.append(['contrib/pyncl','Generate NCL plots of cdms transient variables',''])
-
-
-if sys.platform == "linux2" or sys.platform == 'darwin':
-    Clist.append(['contrib/natgrid','A two-dimensional random data interpolation package based on Dave Watson\'s nngridr',''])
-
-if '--enable-R' in sys.argv or '--enable-r' in sys.argv:
-    Clist.append(['contrib/Rpy','Python Interface to the R library','GNU'])
-
-if '--enable-ioapi' in sys.argv :
-    Clist.append(['contrib/pyIoapi','Python Interface to the IoAPI library','GNU'])
-    Clist.append(['contrib/egenix',"Collection of tools which enhance Python's usability in many important areas such as ODBC database connectivity, fast text processing, date/time processing and web site programming.",'LICENSE'])
-    Clist.append(['contrib/ioapiTools','ioapiTools developed by Alexis Zubrow from the University of Chicago','GNU'])
-
-if '--enable-spanlib' in sys.argv :
-    Clist.append(['contrib/spanlib','Package for spectral analysis','GNU'])
-    
-if not dostandard:
-    packages = []
-
-for c in Clist:
-    clist.append(c)
-    packages.append(c[0])
-
-    
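
contrib.py grows the optional package list conditionally, so extra contrib
packages are selected by passing flags straight through to the installer's argv.
A plausible invocation (install.py as the entry-point name is an assumption):

    python install.py --enable-R --enable-ioapi --enable-spanlib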
diff --git a/installation/control.py b/installation/control.py
deleted file mode 100644
index 7a3a520919de91bf4f33afcfaeeafc668ab153af..0000000000000000000000000000000000000000
--- a/installation/control.py
+++ /dev/null
@@ -1,71 +0,0 @@
-# This file is used to control the behavior of install.py.
-
-# The search path is used if the X11 directories aren't configured.
-x11search = ['/usr/X11R6', '/usr/X11R6.5.1', 
-             '/usr/X11R6.4','/usr','/usr/openwin','/opt']
-# Here is where they are on OSF1 and perhaps similar systems
-x11OSF1lib = ['/usr/lib/X11', '/usr/lib']
-x11OSF1include = ['/usr/include/X11']
-
-# Controlling the install itself
-force=0    # Force a complete recompilation?
-norun=0   # Cause _install just to echo command?
-echo=0     # Echo installation commands before executing?
-log=1      # Make logs?
-silent = 0 # Suppress progress messages?
-
-import os,sys
-current_dir = os.path.dirname(__file__)
-build_dir = os.getcwd()
-sys.path.append(build_dir)
-src_dir = os.path.join(current_dir, '..')
-installation_script_dir = os.path.join(src_dir, 'installation')
-
-sys.path.append(src_dir)
-sys.path.append(installation_script_dir)
-
-# Configuration
-do_configure = 1
-if os.path.isfile(os.path.join(build_dir,'cdat_info.py')):
-    try:
-      import cdat_info
-      do_configure = 0
-    except ImportError:
-      pass
-
-finish="""
-******************************************************
-Success! CDAT has been installed in %s.
-Make sure all packages built successfully.
-******************************************************
-
-""" %(sys.prefix,)
-
-# Options used for building setup.py, install_script, make
-make_code = os.environ.get('MAKE', 'make')
-
-# List of packages to be built
-packages = [ 
-    "Packages/pydebug",
-    "Packages/cdtime",
-    "Packages/demo",
-    "Packages/help",
-    "Packages/regrid2",
-    "Packages/cdms2", 
-    "Packages/esg", 
-    "Packages/ncml",
-    "Packages/DV3D",
-    "Packages/vcs",
-    "Packages/vcsaddons",
-    "Packages/cdutil",
-    "Packages/unidata",
-    "Packages/xmgrace",
-    "Packages/genutil", 
-    "Packages/Thermo",
-    "Packages/WK",
-    "Packages/gui_support",
-    "Packages/distarray",
-    ]
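
The MAKE lookup in control.py above uses the Python-2-only has_key() idiom;
dict.get() expresses the same fallback in one line and also survives a move to
Python 3:

    import os

    # Use $MAKE when set, otherwise fall back to plain 'make'.
    make_code = os.environ.get('MAKE', 'make')
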
diff --git a/installation/debug.py b/installation/debug.py
deleted file mode 100644
index 87fcd2bc9f1bba7a758aeaac9e5e1d4887cf7001..0000000000000000000000000000000000000000
--- a/installation/debug.py
+++ /dev/null
@@ -1,12 +0,0 @@
-## action['setup.py'] = sys.executable + ' setup.py build --debug install'
-## action['install_script'] = './install_script --debug ' + sys.exec_prefix
-## for k in ['makefile','Makefile','MAKEFILE']:
-##     action[k] = make_code + " PREFIX='%s' DEBUG=1 install " % sys.exec_prefix
-
-# matplotlib depends on pkg-config under install/bin
-action['setup.py'] = 'PATH=%s/bin:$PATH && %s setup.py build --debug install --prefix=%s ; ' \
-    % (sys.exec_prefix, sys.executable, target_prefix)
-action['install_script'] = './install_script  %s %s --debug ; ' % (target_prefix, sys.exec_prefix)
-for k in ['makefile','Makefile','MAKEFILE']:
-    action[k] = make_code + " PYPREFIX='%s' PREFIX='%s' DEBUG=1 install ; " % (sys.exec_prefix,target_prefix)
-action['autogen.sh'] = "autogen.sh ; ./configure --prefix=%s  --with-python=%s ; make ; make install ;" % (os.path.join(os.path.split(target_prefix)[0],'Externals'), os.path.join(sys.exec_prefix,'bin','python'))
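
debug.py only fills in the action mapping: each key is a marker file
(setup.py, install_script, the makefile variants, autogen.sh) and each value is
the debug-flavored shell command to run for packages containing that file. A
hypothetical sketch of how such a map could be dispatched per package directory
(run_action and the traversal are illustrative, not the actual install.py
logic):

    import os

    def run_action(action, package_dir):
        # Run the command of the first marker file found in the package.
        for marker in sorted(action):
            if os.path.exists(os.path.join(package_dir, marker)):
                return os.system('cd %s && %s' % (package_dir, action[marker]))
        return 0
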
diff --git a/installation/irix.py b/installation/irix.py
deleted file mode 100644
index 04e8318aa5daa75b203da3c545fdb9c2ed5a13fc..0000000000000000000000000000000000000000
--- a/installation/irix.py
+++ /dev/null
@@ -1,2 +0,0 @@
-x11include='/usr/include/X11'
-x11libdir='/usr/lib/X11'
diff --git a/installation/pcmdi.py b/installation/pcmdi.py
deleted file mode 100644
index 141884ef28afcc3e26e0ef3ba097e0bfd07f951f..0000000000000000000000000000000000000000
--- a/installation/pcmdi.py
+++ /dev/null
@@ -1,22 +0,0 @@
-# Edit this configuration file before building.
-# Always build with --force after changing a configuration.
-# You do not need to rebuild Python itself.
-CDMS_INCLUDE_DRS='yes'
-# if sys.platform=="linux2":
-#   COMPILER_EXTRA_LIBS=['pgftnrtl','pgc']
-# else:
-#   COMPILER_EXTRA_LIBS=[]
-COMPILER_EXTRA_LIBS=["gfortran",]
-#if sys.platform[0:3] == "aix":  # and probably other platforms...
-#    CDMS_INCLUDE_QL = 'no'
-#else:
-#    CDMS_INCLUDE_QL ='yes'
-
-# These don't actually get respected by the libcdms build yet.
-# drs_file = '/usr/local/lib/libdrs.a'
-
-# Add on additional packages
-#packages.append('Packages/psql')
-#packages.append('Packages/cu')
-#packages.append('Packages/pcmdi')
-
diff --git a/installation/pp.py b/installation/pp.py
deleted file mode 100644
index 6c5abf9c0b6313b17422f2e6608e6c6f85cd3c2c..0000000000000000000000000000000000000000
--- a/installation/pp.py
+++ /dev/null
@@ -1,3 +0,0 @@
-# To build CDMS with support for the Met Office PP format:
-# express_install /usr/local/cdat/somewhere --force --configuration=installation/pp.py
-CDMS_INCLUDE_PP='yes'
diff --git a/installation/psql.py b/installation/psql.py
deleted file mode 100644
index d3b52b6ebc233f9a9097d64ee0266c0fa35affe9..0000000000000000000000000000000000000000
--- a/installation/psql.py
+++ /dev/null
@@ -1,3 +0,0 @@
-# Add on additional packages
-CDMS_INCLUDE_QL ='yes'
-packages.append('Packages/psql')
diff --git a/installation/standard.py b/installation/standard.py
deleted file mode 100644
index b86f594dc10de97def90ed0072529ae8283f3f6a..0000000000000000000000000000000000000000
--- a/installation/standard.py
+++ /dev/null
@@ -1,81 +0,0 @@
-# DO NOT EDIT THIS FILE
-# Instead, make your own configuration file to override these values 
-# and use the -c option to read it.
-
-# This is the standard configuration file. It is read first by install.py.
-# In your own configuration file you can use any Python statements to modify
-# these values. 
-
-# File pcmdi.py is an example that shows the changes we use at PCMDI.
-
-# Append to packages to build additional packages, such as
-# packages.append('cu')
-
-#This file is executed as Python input so you can compute values depending on
-#platform, etc. Modules os, sys will be imported already.
-
-current_dir = os.path.dirname(__file__)
-src_dir = os.path.join(current_dir, '..')
-libcdms_dir = os.path.join(src_dir, 'libcdms')
-
-## This part determines the target installation prefix
-target_prefix = sys.prefix
-for i in range(len(sys.argv)):
-    a = sys.argv[i]
-    if a=='--prefix':
-        target_prefix=sys.argv[i+1]
-    sp = a.split("--prefix=")
-    if len(sp)==2:
-        target_prefix=sp[1]
-
-
-# This is where we build netcdf, if you let us
-#netcdf_directory = os.popen('%s --prefix' % os.environ.get("LOCNCCONFIG","nc-config")).readlines()[0]
-#netcdf_include_directory = os.popen('%s --includedir' % os.environ.get("LOCNCCONFIG","nc-config")).readlines()[0]
-#netcdf_include_directory= os.path.join(os.environ.get("EXTERNALS",os.path.join(sys.prefix,'Externals')),'include')
-
-#  Control of the CDMS build
-drs_file = '/usr/local/libdrs.a'  # only needed if next line is 'yes'
-CDMS_INCLUDE_DRS='no'    # Change to 'yes' to include DRS. If yes:
-                         # Assumes the library named in drs_file above exists.
-                         # Assumes you have a Fortran compiler.
-CDMS_INCLUDE_QL='no'     # Include QL in build?
-                         # Caution: if set to yes, CDMS library compile
-                         # may fail on certain platforms, including AIX.
-CDMS_INCLUDE_HDF='no'    # Don't set to yes, doesn't work.
-CDMS_INCLUDE_PP='no'     # Met Office PP format is built in to cdunif.
-#  Location of X11 library
-#     If you set x11libdir (that is two ones and an el) AND x11include to point
-#     to the lib and include directories, they will be used.
-#     Otherwise a search is made for common locations.
-if sys.platform in ['mac']:
-   x11libdir='/usr/X11R6/lib'
-   x11include='/usr/X11R6/include'
-else:
-   x11libdir=''
-   x11include=''
-
-#  List of math libraries
-#    We attempt to get the C math libraries right, but if we don't, fix it here.
-mathlibs= ['m']  #i.e., libm.a
-if sys.platform in ['win32', 'mac', 'beos5']:
-    mathlibs = []
-
-# Build actions
-action = {}
-## Commented out: pyfort is not used any longer (it's been years)
-#if os.path.exists(os.path.join(target_prefix, 'bin', 'pyfort')):
-#    action['*.pfp'] = os.path.join(target_prefix, 'bin', 'pyfort') + " -i %(file)s ; "
-#elif os.path.exists(os.path.join(sys.exec_prefix, 'bin', 'pyfort')):
-#    action['*.pfp'] = os.path.join(sys.exec_prefix, 'bin', 'pyfort') + " -i %(file)s ; "
-#else:
-#    action['*.pfp'] = "pyfort  -i %(file)s ; "
-    
-# matplotlib depends on pkg-config
-action['setup.py'] = 'PATH=%s/bin:$PATH  %s setup.py install --prefix=%s ; ' \
-    % (sys.exec_prefix, sys.executable, target_prefix)
-install_script_path = os.path.join(libcdms_dir, 'install_script')
-action['install_script'] = install_script_path + ' %s %s ; ' % (target_prefix, sys.executable)
-for k in ['makefile','Makefile','MAKEFILE']:
-    action[k] = make_code + " PYPREFIX='%s' PREFIX='%s' install ; " % (sys.exec_prefix,target_prefix)
-action['autogen.sh'] = "autogen.sh ; ./configure --prefix=%s  --with-python=%s ; make -j1 ; make -j1 install ;" % (os.environ.get("EXTERNALS",os.path.join(sys.prefix,'Externals')), os.path.join(sys.exec_prefix,'bin','python'))
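
As the header above says, standard.py is meant to be overridden rather than
edited: a site configuration file passed via the -c option can reassign any of
these values and extend packages with ordinary Python statements. A minimal
example of such an override file (the file name is hypothetical):

    # site_config.py, read with install.py's -c option.
    CDMS_INCLUDE_DRS = 'yes'   # build DRS support (needs a Fortran compiler)
    packages.append('cu')      # build an additional package
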
diff --git a/pysrc/README.txt b/pysrc/README.txt
deleted file mode 100644
index 040a48ab711525181bf3a88b0a88f4220080dff0..0000000000000000000000000000000000000000
--- a/pysrc/README.txt
+++ /dev/null
@@ -1,36 +0,0 @@
-This directory contains all the sources for building a Python suitable for
-use with CDAT. 
-
-Changes from standard distributions:
-   a. readline
-      In file readline.c, the definition of RL_LIBRARY_VERSION is changed to
-      avoid an error when the macro is already defined, by undefining it first.
-   b. We use a private version of Python's setup.py to have it find
-      our own tcl/tk.
-
-To install:
-./install_script /whereyouwanttoputit 
-
-A subdirectory build will be created that contains the output.
-Some of these products can be tested by changing to their directory under 
-build and typing "make test".
-
-If you put in a new source file you need to remove the old one and run
-./clean_script before building again.
-
-
-OPTIONS:
-You can add --enable-aqua to the build line to skip building Tcl/Tk and use
-the native Aqua versions instead (Darwin only).
-You can add --disable-tkbuild to the build line to skip building Tcl/Tk.
-
-Log files are created in the build subdirectory.
-
-Each of the pieces may be built individually using the corresponding .sh 
-files in this directory. Warnings are common from many of the packages
-and vary from architecture to architecture.
-
-N.B.: The order in which the packages are built matters. 
-
-You can add an 'exit 0' at any appropriate point in install_script if you
-want to build only up to that point and then stop.
diff --git a/pysrc/clean_script b/pysrc/clean_script
deleted file mode 100755
index 185cc2b0e81960ddc82b2f57fc16173a159b106f..0000000000000000000000000000000000000000
--- a/pysrc/clean_script
+++ /dev/null
@@ -1,2 +0,0 @@
-/bin/rm -fr build >/dev/null 2>&1
-find . -name 'config.cache' -print -exec rm {} \; 
diff --git a/pysrc/install_script.obsolete b/pysrc/install_script.obsolete
deleted file mode 100755
index a96a6fab401651dec84c6f17c0c72641ac7e5b30..0000000000000000000000000000000000000000
--- a/pysrc/install_script.obsolete
+++ /dev/null
@@ -1,117 +0,0 @@
-#!/bin/sh
-d=`pwd`
-if [ -n "$PYTHONPATH" ]; then
-    echo "PYTHONPATH environment variable should not be set!"
-    exit 1
-fi
-
-if [ -n "$PYTHONHOME" ]; then
-   echo "PYTHONHOME environment variable should not be set!"
-   exit 1
-fi
-
-echo "Building Zlib, Readline, Tcl, Tk, and Python."
-echo "Logs are in $d/build"
- 
-
-OS=`uname`
-AQUA=no
-TK=yes
-all=$*
-READLINE=yes
-ZLIB=yes
-OSver=`uname -r`
-OSMajor=`uname -r | cut -d. -f1`
-
-s=$1; shift;
-while [ "$#" -ne 0 ]
-do
-  # Translate $1 to lowercase
-  MYOPT=`echo $1 | tr 'A-Z' 'a-z'`
-  if [ "$MYOPT" = "--enable-aqua" ]; then
-      if ( test "${OS}" == "Darwin" ) then
-	  AQUA=yes
-      else
-	  echo "--enable-aqua is for Darwin systems only! Use --disable-tkbuild"
-	  exit 1
-      fi
-      if ( test "${OSMajor}" == "9" ) then
-	  echo "Detected Leopard 10.5, doing the posix thing";
-	  CPPFLAGS="-DSETPGRP_HAVE_ARG "${CFLAGS}
-      fi
-  fi
-  if [ "$MYOPT" = "--disable-tkbuild" ]; then
-      TK=no
-  fi
-  if [ "$MYOPT" = "--disable-externals-build" ]; then
-      TK=no
-      READLINE=no
-      ZLIB=no
-  fi
-  shift
-done
-./prolog.sh ${all}
-if [ $? -ne 0 ]; then
-    echo "Unpacking of tar files failed."
-    exit 1
-fi
-
-
-if [ "${ZLIB}" = "no" ]; then
-    echo "skipping build of zlib"
-else
-    echo "Building zlib"
-    ./zlib.sh $s  >build/zlib.LOG 2>&1
-    if [ $? -ne 0 ]; then
-	echo "Build of zlib failed"
-	exit 1
-    fi
-fi
-
-if [ "${READLINE}" = "no" ]; then
-    echo "skipping build of readline"
-else
-    echo "Building readline"
-    ./readline.sh $s >build/readline.LOG 2>&1
-    if [ $? -ne 0 ]; then
-	echo "Build of readline failed"
-	exit 1
-    fi
-fi
-if [ "${OS}" = "CYGWIN_NT-5.1" ]; then
-   echo "Tcl - Using the pre-built tcl library that is part of the standard Cygwin distribution"
-   echo "Tk - Using the pre-built tk library that is part of the standard Cygwin distribution"
-elif [ "${OS}" = "CYGWIN_NT-6.0" ]; then
-   echo "Tcl - Using the pre-built tcl library that is part of the standard Cygwin distribution"
-   echo "Tk - Using the pre-built tk library that is part of the standard Cygwin distribution"
-elif [ "${AQUA}" = "yes" ]; then
-    echo "Tcl - Using the pre-built tcl library that is part of the standard Darwin distribution (with Aqua support)"
-    echo "Tk - Using the pre-built tk library that is part of the standard Darwin distributioni (with Aqua support)"
-elif [ "${TK}" = "no" ]; then
-    echo "Tcl - Using the pre-built tcl library that is part of your system"
-    echo "Tk - Using the pre-built tk library that is part of your system"
-else
-   echo "Building tcl"
-   ./tcl.sh $s >build/tcl.LOG 2>&1
-   if [ $? -ne 0 ]; then
-       echo "Build of tcl failed."
-       exit 1
-   fi
-   
-   echo "Building tk"
-   ./tk.sh $s >build/tk.LOG 2>&1
-   if [ $? -ne 0 ]; then
-       echo "Build of tk failed."
-       exit 1
-   fi
-fi
-
-echo "Building python"
-./python.sh $s >build/python.LOG 2>&1
-if [ $? -ne 0 ]; then
-    echo "Build of Python failed."
-    exit 1
-fi
-
-echo "Python built successfully."
-
diff --git a/pysrc/prolog.sh b/pysrc/prolog.sh
deleted file mode 100755
index f989095939d5ade9082fd781e05ff23025fff4a6..0000000000000000000000000000000000000000
--- a/pysrc/prolog.sh
+++ /dev/null
@@ -1,85 +0,0 @@
-#!/bin/sh
-if (test "$1" = "--debug") then
-    D="--debug";
-    OPT=-g; 
-    shift
-else
-    D="";
-    OPT=${OPT:=-O}
-fi
-export OPT
-
-OS=`uname`
-
-if (test -z "$1") then
-    echo "Usage: $0 prefix";
-    exit 1
-fi
-version=`more ../version`
-
-if (test ! -d $1) then
-    echo -n "$1/${version} is not a directory; create it? (y/[n])";
-    y='n'
-    read y;
-    if (test ${y} = 'y') then
-        mkdir -p $1/${version}/bin; mkdir $1/${version}/lib; mkdir $1/${version}/include ; mkdir -p $1/Externals/bin ; mkdir $1/Externals/lib ; mkdir $1/Externals/share ; mkdir $1/Externals/include
-        if (test ! -d $1) then
-            echo "Could not create $1, installation aborted.";
-            exit 1
-        fi
-    else
-        echo 'Installation aborted.';
-        exit 1
-    fi
-fi
-echo "Created  $1/${version} and $1/Externals directories"
-echo "Python/CDAT built in $1/${version} and external dependencies binaries and libs are built to $1/Externals"
-
-prefix=`(cd $1;pwd)`
-
-if (test ! -d build) then
-    # Unpack everything into build
-    mkdir build
-    /bin/cp src/*gz build
-    cd build
-    OS=`uname`
-    if (test "${OS}" = "Linux" ) then
-         TAR=`which tar`
-    elif (test "${OS}" = "Darwin" ) then
-    	 TAR=`which tar`
-    elif (test "${OS}" = "CYGWIN_NT-5.1" ) then
-         TAR=`which tar`
-    elif (test "${OS}" = "CYGWIN_NT-6.0" ) then
-         TAR=`which tar`
-    elif (test "${OS}" = "AIX" ) then
-         TAR=`which tar`
-    else
-         echo "Building tar for non GNU OS to unpack Python, some error messages may be generated but can be ignored"
-	 chmod +w tar*gz
-         for x in tar*gz;
-         do 
-           gunzip -f $x;
-           tar xf `basename $x .gz`;
-	   (cd tar-* ; ./configure --prefix=$1/Externals ; make ; make install; cd .. )> LOG.prolog;
-           TAR=$1/Externals/bin/tar
-         done
-    fi
-    #rm tar*gz
-    chmod +w *.gz 
-    for x in *.gz; 
-    do 
-        echo "$x"; 
-	gunzip -f $x;
-        ${TAR} xf `basename $x .gz`;
-        /bin/rm -f `basename $x .gz`;
-    done
-#    for x in *.tgz; 
-#    do 
-#        echo "$x"; 
-#        ${TAR} xzf $x;
-#        /bin/rm -f $x
-#    done
-    cd ..
-fi
-cd build
-echo "Installation to ${prefix}"
diff --git a/pysrc/python.sh b/pysrc/python.sh
deleted file mode 100755
index 3e0d844b76358cfdaf7aa9b45c7e5f7206a5a11a..0000000000000000000000000000000000000000
--- a/pysrc/python.sh
+++ /dev/null
@@ -1,76 +0,0 @@
-#!/bin/sh
-. ./prolog.sh
-# Python, idle
-# This needs to be set or Python's installer will conclude _tkinter cannot
-# be imported.
-CCTEMP=${CC-gcc}
-# Get command name WITHOUT the parameters
-CCTEMP=`echo $CCTEMP | awk '{print $1}'` 
-if (test "${CCTEMP}" = "gcc") then
-config_opt="--with-gcc --without-cxx"
-else
-   config_opt="--without-gcc --without-cxx"
-fi
-OS=`uname`
-if (test "${OS}" = "Darwin") then  # MacIntosh OSX
-   CPPFLAGS="-I${prefix}/Externals/include"; export CPPFLAGS
-   LDFLAGS="-L${prefix}/Externals/lib"; export LDFLAGS
-   config_opt=""
-   OPT=""; export OPT
-fi
-getaddrbug=""
-if (test "${OS}" = "OSF1") then
-    getaddrbug="--disable-ipv6"
-fi
-if (test "${OS}" = "AIX") then
-    getaddrbug="--disable-ipv6"
-fi
-cd Python*
-/bin/rm -f setup.py
-/bin/cp ../../src/setup.py setup.py
-CDAT_PREFIX=${prefix}/Externals; export CDAT_PREFIX
-if (test "${OS}" = "Linux") then  # Linux -- needed for readline
-   export LDFLAGS="-L${prefix}/Externals/lib -Wl,-R${prefix}/Externals/lib"
-   if (test "${CCTEMP}" = "icc") then  # zeus x86_64 with Intel compiler
-      if (test "${IC}" = "") then
-	  echo "Run 'use <compiler>' to set environment variable IC to the location of libimf.a, libirc.a"
-	  exit 1
-      fi
-      export LDFLAGS="${LDFLAGS} -L${IC}/lib -limf -lirc"
-   fi
-fi
-./configure ${config_opt} --prefix=${prefix}/${version} ${getaddrbug}
-if (test $? -ne 0) then
-    echo "Python configure failed."; exit 1;
-fi
-
-make 
-if (test $? -ne 0) then
-    echo "Python make failed."; exit 1;
-fi
-
-make install 
-if (test $? -ne 0) then
-    echo "Python install failed."; exit 1;
-fi
-
-#cd Tools/idle
-#${prefix}/bin/python setup.py install
-#if (test $? -ne 0) then
-#    echo "Python idle install failed."; exit 1;
-#fi
-mkdir -p ${prefix}/Externals/share
-if (test "${OS}" = "CYGWIN_NT-5.1" ) then
-    ln -s /usr/share/tcl* ${prefix}/Externals/share ;
-    ln -s /usr/share/tk* ${prefix}/Externals/share ;
-fi
-if (test "${OS}" = "CYGWIN_NT-6.0" ) then
-    ln -s /usr/share/tcl* ${prefix}/Externals/share ;
-    ln -s /usr/share/tk* ${prefix}/Externals/share ;
-fi
-
-${prefix}/${version}/bin/python -c "import Tkinter"
-if (test $? -ne 0) then
-    echo "Python Tkinter import failed."; exit 1;
-fi
-echo "Python built with Tkinter correctly." 
diff --git a/pysrc/readline.sh b/pysrc/readline.sh
deleted file mode 100755
index 40f2d97d2d29c7375649148a48bca45bedb80ab0..0000000000000000000000000000000000000000
--- a/pysrc/readline.sh
+++ /dev/null
@@ -1,23 +0,0 @@
-#!/bin/sh
-. ./prolog.sh
-cd readline-*
-./configure --prefix=${prefix}/Externals
-if (test $? -ne 0) then
-    echo "readline configuration failed.";
-    echo "Some platforms don't support readline, this doesn't matter.";
-    echo "Ignoring this error.";
-    exit 0;
-fi
-make
-if (test $? -ne 0) then
-    echo "readline make failed.";
-    echo "Some platforms don't support readline, this doesn't matter.";
-    echo "Ignoring this error.";
-    exit 0;
-fi
-make install
-if (test $? -ne 0) then
-    echo "readline install failed.";
-    echo "This is unexpected since it built ok.";
-    exit 1;
-fi
diff --git a/pysrc/src/setup-2.7.1.py b/pysrc/src/setup-2.7.1.py
deleted file mode 100644
index c7d059069474ff7ad2d9f8b924d70503ff4c4647..0000000000000000000000000000000000000000
--- a/pysrc/src/setup-2.7.1.py
+++ /dev/null
@@ -1,2067 +0,0 @@
-# Autodetecting setup.py script for building the Python extensions
-#
-
-__version__ = "$Revision: 86041 $"
-
-import sys, os, imp, re, optparse
-from glob import glob
-from platform import machine as platform_machine
-import sysconfig
-
-from distutils import log
-from distutils import text_file
-from distutils.errors import *
-from distutils.core import Extension, setup
-from distutils.command.build_ext import build_ext
-from distutils.command.install import install
-from distutils.command.install_lib import install_lib
-from distutils.spawn import find_executable
-
-# Were we compiled --with-pydebug or with #define Py_DEBUG?
-COMPILED_WITH_PYDEBUG = hasattr(sys, 'gettotalrefcount')
-
-# This global variable is used to hold the list of modules to be disabled.
-disabled_module_list = []
-
-def add_dir_to_list(dirlist, dir):
-    """Add the directory 'dir' to the list 'dirlist' (at the front) if
-    1) 'dir' is not already in 'dirlist'
-    2) 'dir' actually exists, and is a directory."""
-    if dir is not None and os.path.isdir(dir) and dir not in dirlist:
-        dirlist.insert(0, dir)
-
-def macosx_sdk_root():
-    """
-    Return the directory of the current OSX SDK,
-    or '/' if no SDK was specified.
-    """
-    cflags = sysconfig.get_config_var('CFLAGS')
-    m = re.search(r'-isysroot\s+(\S+)', cflags)
-    if m is None:
-        sysroot = '/'
-    else:
-        sysroot = m.group(1)
-    return sysroot
-
-def is_macosx_sdk_path(path):
-    """
-    Returns True if 'path' can be located in an OSX SDK
-    """
-    return (path.startswith('/usr/') and not path.startswith('/usr/local')) or path.startswith('/System/')
-
-def find_file(filename, std_dirs, paths):
-    """Searches for the directory where a given file is located,
-    and returns a possibly-empty list of additional directories, or None
-    if the file couldn't be found at all.
-
-    'filename' is the name of a file, such as readline.h or libcrypto.a.
-    'std_dirs' is the list of standard system directories; if the
-        file is found in one of them, no additional directives are needed.
-    'paths' is a list of additional locations to check; if the file is
-        found in one of them, the resulting list will contain the directory.
-    """
-    if sys.platform == 'darwin':
-        # Honor the MacOSX SDK setting when one was specified.
-        # An SDK is a directory with the same structure as a real
-        # system, but with only header files and libraries.
-        sysroot = macosx_sdk_root()
-
-    # Check the standard locations
-    for dir in std_dirs:
-        f = os.path.join(dir, filename)
-
-        if sys.platform == 'darwin' and is_macosx_sdk_path(dir):
-            f = os.path.join(sysroot, dir[1:], filename)
-
-        if os.path.exists(f): return []
-
-    # Check the additional directories
-    for dir in paths:
-        f = os.path.join(dir, filename)
-
-        if sys.platform == 'darwin' and is_macosx_sdk_path(dir):
-            f = os.path.join(sysroot, dir[1:], filename)
-
-        if os.path.exists(f):
-            return [dir]
-
-    # Not found anywhere
-    return None
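# Illustrative calls (paths and outcomes hypothetical): find_file() returns []
# when the file already sits on a standard path, [dir] when one extra
# directory is needed, and None when the file cannot be found at all:
#
#   find_file('readline/readline.h', ['/usr/include'], [])           -> []
#   find_file('readline/readline.h', [], ['/opt/local/include'])     -> ['/opt/local/include']
#   find_file('no_such.h', ['/usr/include'], ['/opt/local/include']) -> None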
-
-def find_library_file(compiler, libname, std_dirs, paths):
-    result = compiler.find_library_file(std_dirs + paths, libname)
-    if result is None:
-        return None
-
-    if sys.platform == 'darwin':
-        sysroot = macosx_sdk_root()
-
-    # Check whether the found file is in one of the standard directories
-    dirname = os.path.dirname(result)
-    for p in std_dirs:
-        # Ensure path doesn't end with path separator
-        p = p.rstrip(os.sep)
-
-        if sys.platform == 'darwin' and is_macosx_sdk_path(p):
-            if os.path.join(sysroot, p[1:]) == dirname:
-                return [ ]
-
-        if p == dirname:
-            return [ ]
-
-    # Otherwise, it must have been in one of the additional directories,
-    # so we have to figure out which one.
-    for p in paths:
-        # Ensure path doesn't end with path separator
-        p = p.rstrip(os.sep)
-
-        if sys.platform == 'darwin' and is_macosx_sdk_path(p):
-            if os.path.join(sysroot, p[1:]) == dirname:
-                return [ p ]
-
-        if p == dirname:
-            return [p]
-    else:
-        assert False, "Internal error: Path not found in std_dirs or paths"
-
-def module_enabled(extlist, modname):
-    """Returns whether the module 'modname' is present in the list
-    of extensions 'extlist'."""
-    extlist = [ext for ext in extlist if ext.name == modname]
-    return len(extlist)
-
-def find_module_file(module, dirlist):
-    """Find a module in a set of possible folders. If it is not found
-    return the unadorned filename"""
-    list = find_file(module, [], dirlist)
-    if not list:
-        return module
-    if len(list) > 1:
-        log.info("WARNING: multiple copies of %s found"%module)
-    return os.path.join(list[0], module)
-
-class PyBuildExt(build_ext):
-
-    def __init__(self, dist):
-        build_ext.__init__(self, dist)
-        self.failed = []
-
-    def build_extensions(self):
-
-        # Detect which modules should be compiled
-        missing = self.detect_modules()
-
-        # Remove modules that are present on the disabled list
-        extensions = [ext for ext in self.extensions
-                      if ext.name not in disabled_module_list]
-        # move ctypes to the end, it depends on other modules
-        ext_map = dict((ext.name, i) for i, ext in enumerate(extensions))
-        if "_ctypes" in ext_map:
-            ctypes = extensions.pop(ext_map["_ctypes"])
-            extensions.append(ctypes)
-        self.extensions = extensions
-
-        # Fix up the autodetected modules, prefixing all the source files
-        # with Modules/ and adding Python's include directory to the path.
-        (srcdir,) = sysconfig.get_config_vars('srcdir')
-        if not srcdir:
-            # Maybe running on Windows but not using CYGWIN?
-            raise ValueError("No source directory; cannot proceed.")
-        srcdir = os.path.abspath(srcdir)
-        moddirlist = [os.path.join(srcdir, 'Modules')]
-
-        # Platform-dependent module source and include directories
-        incdirlist = []
-        platform = self.get_platform()
-        if platform == 'darwin' and ("--disable-toolbox-glue" not in
-            sysconfig.get_config_var("CONFIG_ARGS")):
-            # Mac OS X also includes some mac-specific modules
-            macmoddir = os.path.join(srcdir, 'Mac/Modules')
-            moddirlist.append(macmoddir)
-            incdirlist.append(os.path.join(srcdir, 'Mac/Include'))
-
-        # Fix up the paths for scripts, too
-        self.distribution.scripts = [os.path.join(srcdir, filename)
-                                     for filename in self.distribution.scripts]
-
-        # Python header files
-        headers = [sysconfig.get_config_h_filename()]
-        headers += glob(os.path.join(sysconfig.get_path('platinclude'), "*.h"))
-        for ext in self.extensions[:]:
-            ext.sources = [ find_module_file(filename, moddirlist)
-                            for filename in ext.sources ]
-            if ext.depends is not None:
-                ext.depends = [find_module_file(filename, moddirlist)
-                               for filename in ext.depends]
-            else:
-                ext.depends = []
-            # re-compile extensions if a header file has been changed
-            ext.depends.extend(headers)
-
-            # platform specific include directories
-            ext.include_dirs.extend(incdirlist)
-
-            # If a module has already been built statically,
-            # don't build it here
-            if ext.name in sys.builtin_module_names:
-                self.extensions.remove(ext)
-
-        # Parse Modules/Setup and Modules/Setup.local to figure out which
-        # modules are turned on in the file.
-        remove_modules = []
-        for filename in ('Modules/Setup', 'Modules/Setup.local'):
-            input = text_file.TextFile(filename, join_lines=1)
-            while 1:
-                line = input.readline()
-                if not line: break
-                line = line.split()
-                remove_modules.append(line[0])
-            input.close()
-
-        for ext in self.extensions[:]:
-            if ext.name in remove_modules:
-                self.extensions.remove(ext)
-
-        # When you run "make CC=altcc" or something similar, you really want
-        # those environment variables passed into the setup.py phase.  Here's
-        # a small set of useful ones.
-        compiler = os.environ.get('CC')
-        args = {}
-        # unfortunately, distutils doesn't let us provide separate C and C++
-        # compilers
-        if compiler is not None:
-            (ccshared,cflags) = sysconfig.get_config_vars('CCSHARED','CFLAGS')
-            args['compiler_so'] = compiler + ' ' + ccshared + ' ' + cflags
-        self.compiler.set_executables(**args)
-
-        build_ext.build_extensions(self)
-
-        longest = max([len(e.name) for e in self.extensions])
-        if self.failed:
-            longest = max(longest, max([len(name) for name in self.failed]))
-
-        def print_three_column(lst):
-            lst.sort(key=str.lower)
-            # guarantee zip() doesn't drop anything
-            while len(lst) % 3:
-                lst.append("")
-            for e, f, g in zip(lst[::3], lst[1::3], lst[2::3]):
-                print "%-*s   %-*s   %-*s" % (longest, e, longest, f,
-                                              longest, g)
-
-        if missing:
-            print
-            print ("Python build finished, but the necessary bits to build "
-                   "these modules were not found:")
-            print_three_column(missing)
-            print ("To find the necessary bits, look in setup.py in"
-                   " detect_modules() for the module's name.")
-            print
-
-        if self.failed:
-            failed = self.failed[:]
-            print
-            print "Failed to build these modules:"
-            print_three_column(failed)
-            print
-
-    def build_extension(self, ext):
-
-        if ext.name == '_ctypes':
-            if not self.configure_ctypes(ext):
-                return
-
-        try:
-            build_ext.build_extension(self, ext)
-        except (CCompilerError, DistutilsError), why:
-            self.announce('WARNING: building of extension "%s" failed: %s' %
-                          (ext.name, sys.exc_info()[1]))
-            self.failed.append(ext.name)
-            return
-        # Workaround for Mac OS X: The Carbon-based modules cannot be
-        # reliably imported into a command-line Python
-        if 'Carbon' in ext.extra_link_args:
-            self.announce(
-                'WARNING: skipping import check for Carbon-based "%s"' %
-                ext.name)
-            return
-
-        if self.get_platform() == 'darwin' and (
-                sys.maxint > 2**32 and '-arch' in ext.extra_link_args):
-            # Don't bother doing an import check when an extension was
-            # build with an explicit '-arch' flag on OSX. That's currently
-            # only used to build 32-bit only extensions in a 4-way
-            # universal build and loading 32-bit code into a 64-bit
-            # process will fail.
-            self.announce(
-                'WARNING: skipping import check for "%s"' %
-                ext.name)
-            return
-
-        # Workaround for Cygwin: Cygwin currently has fork issues when many
-        # modules have been imported
-        if self.get_platform() == 'cygwin':
-            self.announce('WARNING: skipping import check for Cygwin-based "%s"'
-                % ext.name)
-            return
-        ext_filename = os.path.join(
-            self.build_lib,
-            self.get_ext_filename(self.get_ext_fullname(ext.name)))
-        try:
-            imp.load_dynamic(ext.name, ext_filename)
-        except ImportError, why:
-            self.failed.append(ext.name)
-            self.announce('*** WARNING: renaming "%s" since importing it'
-                          ' failed: %s' % (ext.name, why), level=3)
-            assert not self.inplace
-            basename, tail = os.path.splitext(ext_filename)
-            newname = basename + "_failed" + tail
-            if os.path.exists(newname):
-                os.remove(newname)
-            os.rename(ext_filename, newname)
-
-            # XXX -- This relies on a Vile HACK in
-            # distutils.command.build_ext.build_extension().  The
-            # _built_objects attribute is stored there strictly for
-            # use here.
-            # If there is a failure, _built_objects may not be there,
-            # so catch the AttributeError and move on.
-            try:
-                for filename in self._built_objects:
-                    os.remove(filename)
-            except AttributeError:
-                self.announce('unable to remove files (ignored)')
-        except:
-            exc_type, why, tb = sys.exc_info()
-            self.announce('*** WARNING: importing extension "%s" '
-                          'failed with %s: %s' % (ext.name, exc_type, why),
-                          level=3)
-            self.failed.append(ext.name)
-
-    def get_platform(self):
-        # Get value of sys.platform
-        for platform in ['cygwin', 'beos', 'darwin', 'atheos', 'osf1']:
-            if sys.platform.startswith(platform):
-                return platform
-        return sys.platform
-
-    def detect_modules(self):
-        # PCMDI change
-        # Ensure that the place we put tcl/tk/netcdf etc. is always used
-        libbase = os.environ.get('EXTERNALS', os.path.join(sys.prefix,'..','Externals'))
-        mylibdir = os.path.join(libbase,'lib')
-        myincdir = os.path.join(libbase,'include')
-        add_dir_to_list(self.compiler.library_dirs, mylibdir)
-        add_dir_to_list(self.compiler.include_dirs, myincdir)
-        # end PCMDI change
-
-        # Ensure that /usr/local is always used
-        add_dir_to_list(self.compiler.library_dirs, '/usr/local/lib')
-        add_dir_to_list(self.compiler.include_dirs, '/usr/local/include')
-
-        # Add paths specified in the environment variables LDFLAGS and
-        # CPPFLAGS for header and library files.
-        # We must get the values from the Makefile and not the environment
-        # directly, since an inconsistently reproducible issue comes up where
-        # the environment variable is not set even though the values were passed
-        # into configure and stored in the Makefile (issue found on OS X 10.3).
-        for env_var, arg_name, dir_list in (
-                ('LDFLAGS', '-R', self.compiler.runtime_library_dirs),
-                ('LDFLAGS', '-L', self.compiler.library_dirs),
-                ('CPPFLAGS', '-I', self.compiler.include_dirs)):
-            env_val = sysconfig.get_config_var(env_var)
-            if env_val:
-                # To prevent optparse from raising an exception about any
-                # options in env_val that it doesn't know about we strip out
-                # all double dashes and any dashes followed by a character
-                # that is not for the option we are dealing with.
-                #
-                # Please note that the order of the regex is important!  We must
-                # strip out double-dashes first so that we don't end up
-                # substituting "--Long" with "-Long" and thus cause "ong" to be
-                # used as a library directory.
-                env_val = re.sub(r'(^|\s+)-(-|(?!%s))' % arg_name[1],
-                                 ' ', env_val)
-                parser = optparse.OptionParser()
-                # Make sure that allowing args interspersed with options is
-                # allowed
-                parser.allow_interspersed_args = True
-                parser.error = lambda msg: None
-                parser.add_option(arg_name, dest="dirs", action="append")
-                options = parser.parse_args(env_val.split())[0]
-                if options.dirs:
-                    for directory in reversed(options.dirs):
-                        add_dir_to_list(dir_list, directory)
-
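        # Worked example of the scrub above with arg_name == '-L' (paths
        # illustrative):
        #   re.sub(r'(^|\s+)-(-|(?!L))', ' ',
        #          '-L/opt/lib --verbose -I/opt/include')
        # leaves '-L/opt/lib verbose I/opt/include'; the de-dashed words become
        # bare positional arguments the throwaway optparse parser ignores, so
        # only /opt/lib reaches the library directory list.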
-        if os.path.normpath(sys.prefix) != '/usr' \
-                and not sysconfig.get_config_var('PYTHONFRAMEWORK'):
-            # OSX note: Don't add LIBDIR and INCLUDEDIR when building a framework
-            # (PYTHONFRAMEWORK is set) to avoid linking problems when
-            # building a framework with different architectures than
-            # the one that is currently installed (issue #7473)
-            add_dir_to_list(self.compiler.library_dirs,
-                            sysconfig.get_config_var("LIBDIR"))
-            add_dir_to_list(self.compiler.include_dirs,
-                            sysconfig.get_config_var("INCLUDEDIR"))
-
-        try:
-            have_unicode = unicode
-        except NameError:
-            have_unicode = 0
-
-        # lib_dirs and inc_dirs are used to search for files;
-        # if a file is found in one of those directories, it can
-        # be assumed that no additional -I,-L directives are needed.
-        lib_dirs = self.compiler.library_dirs + [
-            '/lib64', '/usr/lib64',
-            '/lib', '/usr/lib', '/usr/lib/x86_64-linux-gnu',
-            ]
-        inc_dirs = self.compiler.include_dirs + ['/usr/include']
-        exts = []
-        missing = []
-
-        config_h = sysconfig.get_config_h_filename()
-        config_h_vars = sysconfig.parse_config_h(open(config_h))
-
-        platform = self.get_platform()
-        srcdir = sysconfig.get_config_var('srcdir')
-
-        # Check for AtheOS which has libraries in non-standard locations
-        if platform == 'atheos':
-            lib_dirs += ['/system/libs', '/atheos/autolnk/lib']
-            lib_dirs += os.getenv('LIBRARY_PATH', '').split(os.pathsep)
-            inc_dirs += ['/system/include', '/atheos/autolnk/include']
-            inc_dirs += os.getenv('C_INCLUDE_PATH', '').split(os.pathsep)
-
-        # OSF/1 and Unixware have some stuff in /usr/ccs/lib (like -ldb)
-        if platform in ['osf1', 'unixware7', 'openunix8']:
-            lib_dirs += ['/usr/ccs/lib']
-
-        if platform == 'darwin':
-            # This should work on any unixy platform ;-)
-            # If the user has bothered specifying additional -I and -L flags
-            # in OPT and LDFLAGS we might as well use them here.
-            #   NOTE: using shlex.split would technically be more correct, but
-            # also gives a bootstrap problem. Let's hope nobody uses directories
-            # with whitespace in the name to store libraries.
-            cflags, ldflags = sysconfig.get_config_vars(
-                    'CFLAGS', 'LDFLAGS')
-            for item in cflags.split():
-                if item.startswith('-I'):
-                    inc_dirs.append(item[2:])
-
-            for item in ldflags.split():
-                if item.startswith('-L'):
-                    lib_dirs.append(item[2:])
-
-        # Check for MacOS X, which doesn't need libm.a at all
-        math_libs = ['m']
-        if platform in ['darwin', 'beos']:
-            math_libs = []
-
-        # XXX Omitted modules: gl, pure, dl, SGI-specific modules
-
-        #
-        # The following modules are all pretty straightforward, and compile
-        # on pretty much any POSIXish platform.
-        #
-
-        # Some modules that are normally always on:
-        #exts.append( Extension('_weakref', ['_weakref.c']) )
-
-        # array objects
-        exts.append( Extension('array', ['arraymodule.c']) )
-        # complex math library functions
-        exts.append( Extension('cmath', ['cmathmodule.c', '_math.c'],
-                               depends=['_math.h'],
-                               libraries=math_libs) )
-        # math library functions, e.g. sin()
-        exts.append( Extension('math',  ['mathmodule.c', '_math.c'],
-                               depends=['_math.h'],
-                               libraries=math_libs) )
-        # fast string operations implemented in C
-        exts.append( Extension('strop', ['stropmodule.c']) )
-        # time operations and variables
-        exts.append( Extension('time', ['timemodule.c'],
-                               libraries=math_libs) )
-        exts.append( Extension('datetime', ['datetimemodule.c', 'timemodule.c'],
-                               libraries=math_libs) )
-        # fast iterator tools implemented in C
-        exts.append( Extension("itertools", ["itertoolsmodule.c"]) )
-        # code that will be builtins in the future, but conflict with the
-        #  current builtins
-        exts.append( Extension('future_builtins', ['future_builtins.c']) )
-        # random number generator implemented in C
-        exts.append( Extension("_random", ["_randommodule.c"]) )
-        # high-performance collections
-        exts.append( Extension("_collections", ["_collectionsmodule.c"]) )
-        # bisect
-        exts.append( Extension("_bisect", ["_bisectmodule.c"]) )
-        # heapq
-        exts.append( Extension("_heapq", ["_heapqmodule.c"]) )
-        # operator.add() and similar goodies
-        exts.append( Extension('operator', ['operator.c']) )
-        # Python 3.1 _io library
-        exts.append( Extension("_io",
-            ["_io/bufferedio.c", "_io/bytesio.c", "_io/fileio.c",
-             "_io/iobase.c", "_io/_iomodule.c", "_io/stringio.c", "_io/textio.c"],
-             depends=["_io/_iomodule.h"], include_dirs=["Modules/_io"]))
-        # _functools
-        exts.append( Extension("_functools", ["_functoolsmodule.c"]) )
-        # _json speedups
-        exts.append( Extension("_json", ["_json.c"]) )
-        # Python C API test module
-        exts.append( Extension('_testcapi', ['_testcapimodule.c'],
-                               depends=['testcapi_long.h']) )
-        # profilers (_lsprof is for cProfile.py)
-        exts.append( Extension('_hotshot', ['_hotshot.c']) )
-        exts.append( Extension('_lsprof', ['_lsprof.c', 'rotatingtree.c']) )
-        # static Unicode character database
-        if have_unicode:
-            exts.append( Extension('unicodedata', ['unicodedata.c']) )
-        else:
-            missing.append('unicodedata')
-        # access to ISO C locale support
-        data = open('pyconfig.h').read()
-        m = re.search(r"#s*define\s+WITH_LIBINTL\s+1\s*", data)
-        if m is not None:
-            locale_libs = ['intl']
-        else:
-            locale_libs = []
-        if platform == 'darwin':
-            locale_extra_link_args = ['-framework', 'CoreFoundation']
-        else:
-            locale_extra_link_args = []
-
-
-        exts.append( Extension('_locale', ['_localemodule.c'],
-                               libraries=locale_libs,
-                               extra_link_args=locale_extra_link_args) )
-
-        # Modules with some UNIX dependencies -- on by default:
-        # (If you have a really backward UNIX, select and socket may not be
-        # supported...)
-
-        # fcntl(2) and ioctl(2)
-        libs = []
-        if (config_h_vars.get('FLOCK_NEEDS_LIBBSD', False)):
-            # May be necessary on AIX for flock function
-            libs = ['bsd']
-        exts.append( Extension('fcntl', ['fcntlmodule.c'], libraries=libs) )
-        # pwd(3)
-        exts.append( Extension('pwd', ['pwdmodule.c']) )
-        # grp(3)
-        exts.append( Extension('grp', ['grpmodule.c']) )
-        # spwd, shadow passwords
-        if (config_h_vars.get('HAVE_GETSPNAM', False) or
-                config_h_vars.get('HAVE_GETSPENT', False)):
-            exts.append( Extension('spwd', ['spwdmodule.c']) )
-        else:
-            missing.append('spwd')
-
-        # select(2); not on ancient System V
-        exts.append( Extension('select', ['selectmodule.c']) )
-
-        # Fred Drake's interface to the Python parser
-        exts.append( Extension('parser', ['parsermodule.c']) )
-
-        # cStringIO and cPickle
-        exts.append( Extension('cStringIO', ['cStringIO.c']) )
-        exts.append( Extension('cPickle', ['cPickle.c']) )
-
-        # Memory-mapped files (also works on Win32).
-        if platform not in ['atheos']:
-            exts.append( Extension('mmap', ['mmapmodule.c']) )
-        else:
-            missing.append('mmap')
-
-        # Lance Ellinghaus's syslog module
-        # syslog daemon interface
-        exts.append( Extension('syslog', ['syslogmodule.c']) )
-
-        # George Neville-Neil's timing module:
-        # Deprecated in PEP 4 http://www.python.org/peps/pep-0004.html
-        # http://mail.python.org/pipermail/python-dev/2006-January/060023.html
-        #exts.append( Extension('timing', ['timingmodule.c']) )
-
-        #
-        # Here ends the simple stuff.  From here on, modules need certain
-        # libraries, are platform-specific, or present other surprises.
-        #
-
-        # Multimedia modules
-        # These don't work for 64-bit platforms!!!
-        # These represent audio samples or images as strings:
-
-        # Operations on audio samples
-        # According to #993173, this one should actually work fine on
-        # 64-bit platforms.
-        exts.append( Extension('audioop', ['audioop.c']) )
-
-        # Disabled on 64-bit platforms
-        if sys.maxint != 9223372036854775807L:
-            # Operations on images
-            exts.append( Extension('imageop', ['imageop.c']) )
-        else:
-            missing.extend(['imageop'])
-
-        # readline
-        do_readline = self.compiler.find_library_file(lib_dirs, 'readline')
-        readline_termcap_library = ""
-        curses_library = ""
-        # Determine if readline is already linked against curses or tinfo.
-        if do_readline and find_executable('ldd'):
-            fp = os.popen("ldd %s" % do_readline)
-            ldd_output = fp.readlines()
-            ret = fp.close()
-            if ret is None or ret >> 8 == 0:
-                for ln in ldd_output:
-                    if 'curses' in ln:
-                        readline_termcap_library = re.sub(
-                            r'.*lib(n?cursesw?)\.so.*', r'\1', ln
-                        ).rstrip()
-                        break
-                    if 'tinfo' in ln: # termcap interface split out from ncurses
-                        readline_termcap_library = 'tinfo'
-                        break
-        # Issue 7384: If readline is already linked against curses,
-        # use the same library for the readline and curses modules.
-        if 'curses' in readline_termcap_library:
-            curses_library = readline_termcap_library
-        elif self.compiler.find_library_file(lib_dirs, 'ncursesw'):
-            curses_library = 'ncursesw'
-        elif self.compiler.find_library_file(lib_dirs, 'ncurses'):
-            curses_library = 'ncurses'
-        elif self.compiler.find_library_file(lib_dirs, 'curses'):
-            curses_library = 'curses'
-
-        if platform == 'darwin':
-            os_release = int(os.uname()[2].split('.')[0])
-            dep_target = sysconfig.get_config_var('MACOSX_DEPLOYMENT_TARGET')
-            if dep_target and dep_target.split('.') < ['10', '5']:
-                os_release = 8
-            if os_release < 9:
-                # MacOSX 10.4 has a broken readline. Don't try to build
-                # the readline module unless the user has installed a fixed
-                # readline package
-                if find_file('readline/rlconf.h', inc_dirs, []) is None:
-                    do_readline = False
-        if do_readline:
-            if platform == 'darwin' and os_release < 9:
-                # In every directory on the search path search for a dynamic
-                # library and then a static library, instead of first looking
-                # for dynamic libraries on the entire path.
-                # This way a statically linked custom readline gets picked up
-                # before the (possibly broken) dynamic library in /usr/lib.
-                readline_extra_link_args = ('-Wl,-search_paths_first',)
-            else:
-                readline_extra_link_args = ()
-
-            readline_libs = ['readline']
-            if readline_termcap_library:
-                pass # Issue 7384: Already linked against curses or tinfo.
-            elif curses_library:
-                readline_libs.append(curses_library)
-            elif self.compiler.find_library_file(lib_dirs +
-                                                     ['/usr/lib/termcap'],
-                                                     'termcap'):
-                readline_libs.append('termcap')
-            exts.append( Extension('readline', ['readline.c'],
-                                   library_dirs=['/usr/lib/termcap'],
-                                   extra_link_args=readline_extra_link_args,
-                                   libraries=readline_libs) )
-        else:
-            missing.append('readline')
-
-        # crypt module.
-
-        if self.compiler.find_library_file(lib_dirs, 'crypt'):
-            libs = ['crypt']
-        else:
-            libs = []
-        exts.append( Extension('crypt', ['cryptmodule.c'], libraries=libs) )
-
-        # CSV files
-        exts.append( Extension('_csv', ['_csv.c']) )
-
-        # socket(2)
-        exts.append( Extension('_socket', ['socketmodule.c'],
-                               depends = ['socketmodule.h']) )
-        # Detect SSL support for the socket module (via _ssl)
-        search_for_ssl_incs_in = [
-                              '/usr/local/ssl/include',
-                              '/usr/contrib/ssl/include/'
-                             ]
-        ssl_incs = find_file('openssl/ssl.h', inc_dirs,
-                             search_for_ssl_incs_in
-                             )
-        if ssl_incs is not None:
-            krb5_h = find_file('krb5.h', inc_dirs,
-                               ['/usr/kerberos/include'])
-            if krb5_h:
-                ssl_incs += krb5_h
-        ssl_libs = find_library_file(self.compiler, 'ssl',lib_dirs,
-                                     ['/usr/local/ssl/lib',
-                                      '/usr/contrib/ssl/lib/'
-                                     ] )
-
-        if (ssl_incs is not None and
-            ssl_libs is not None):
-            exts.append( Extension('_ssl', ['_ssl.c'],
-                                   include_dirs = ssl_incs,
-                                   library_dirs = ssl_libs,
-                                   libraries = ['ssl', 'crypto'],
-                                   depends = ['socketmodule.h']), )
-        else:
-            missing.append('_ssl')
-
-        # find out which version of OpenSSL we have
-        openssl_ver = 0
-        openssl_ver_re = re.compile(
-            '^\s*#\s*define\s+OPENSSL_VERSION_NUMBER\s+(0x[0-9a-fA-F]+)' )
-
-        # look for the openssl version header on the compiler search path.
-        opensslv_h = find_file('openssl/opensslv.h', [],
-                inc_dirs + search_for_ssl_incs_in)
-        if opensslv_h:
-            name = os.path.join(opensslv_h[0], 'openssl/opensslv.h')
-            if sys.platform == 'darwin' and is_macosx_sdk_path(name):
-                name = os.path.join(macosx_sdk_root(), name[1:])
-            try:
-                incfile = open(name, 'r')
-                for line in incfile:
-                    m = openssl_ver_re.match(line)
-                    if m:
-                        openssl_ver = eval(m.group(1))
-            except IOError, msg:
-                print "IOError while reading opensshv.h:", msg
-                pass
-
-        min_openssl_ver = 0x00907000
-        have_any_openssl = ssl_incs is not None and ssl_libs is not None
-        have_usable_openssl = (have_any_openssl and
-                               openssl_ver >= min_openssl_ver)
-
-        if have_any_openssl:
-            if have_usable_openssl:
-                # The _hashlib module wraps optimized implementations
-                # of hash functions from the OpenSSL library.
-                exts.append( Extension('_hashlib', ['_hashopenssl.c'],
-                                       include_dirs = ssl_incs,
-                                       library_dirs = ssl_libs,
-                                       libraries = ['ssl', 'crypto']) )
-            else:
-                print ("warning: openssl 0x%08x is too old for _hashlib" %
-                       openssl_ver)
-                missing.append('_hashlib')
-        if COMPILED_WITH_PYDEBUG or not have_usable_openssl:
-            # The _sha module implements the SHA1 hash algorithm.
-            exts.append( Extension('_sha', ['shamodule.c']) )
-            # The _md5 module implements the RSA Data Security, Inc. MD5
-            # Message-Digest Algorithm, described in RFC 1321.  The
-            # necessary files md5.c and md5.h are included here.
-            exts.append( Extension('_md5',
-                            sources = ['md5module.c', 'md5.c'],
-                            depends = ['md5.h']) )
-
-        min_sha2_openssl_ver = 0x00908000
-        if COMPILED_WITH_PYDEBUG or openssl_ver < min_sha2_openssl_ver:
-            # OpenSSL doesn't do these until 0.9.8 so we'll bring our own hash
-            exts.append( Extension('_sha256', ['sha256module.c']) )
-            exts.append( Extension('_sha512', ['sha512module.c']) )
-
-        # Modules that provide persistent dictionary-like semantics.  You will
-        # probably want to arrange for at least one of them to be available on
-        # your machine, though none are defined by default because of library
-        # dependencies.  The Python module anydbm.py provides an
-        # implementation independent wrapper for these; dumbdbm.py provides
-        # similar functionality (but slower of course) implemented in Python.
-
-        # Sleepycat^WOracle Berkeley DB interface.
-        #  http://www.oracle.com/database/berkeley-db/db/index.html
-        #
-        # This requires the Sleepycat^WOracle DB code. The supported versions
-        # are set below.  Visit the URL above to download
-        # a release.  Most open source OSes come with one or more
-        # versions of BerkeleyDB already installed.
-
-        max_db_ver = (4, 8)
-        min_db_ver = (4, 1)
-        db_setup_debug = False   # verbose debug prints from this script?
-
-        def allow_db_ver(db_ver):
-            """Returns a boolean if the given BerkeleyDB version is acceptable.
-
-            Args:
-              db_ver: A tuple of the version to verify.
-            """
-            if not (min_db_ver <= db_ver <= max_db_ver):
-                return False
-            # Use this function to filter out known bad configurations.
-            if (4, 6) == db_ver[:2]:
-                # BerkeleyDB 4.6.x is not stable on many architectures.
-                arch = platform_machine()
-                if arch not in ('i386', 'i486', 'i586', 'i686',
-                                'x86_64', 'ia64'):
-                    return False
-            return True
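        # For example (version tuples illustrative): allow_db_ver((4, 7)) is
        # True everywhere, while allow_db_ver((4, 6)) is False on, say, a PPC
        # build, because 4.6.x is trusted only on the x86/ia64 family above.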
-
-        def gen_db_minor_ver_nums(major):
-            if major == 4:
-                for x in range(max_db_ver[1]+1):
-                    if allow_db_ver((4, x)):
-                        yield x
-            elif major == 3:
-                for x in (3,):
-                    if allow_db_ver((3, x)):
-                        yield x
-            else:
-                raise ValueError("unknown major BerkeleyDB version", major)
-
-        # construct a list of paths to look for the header file in on
-        # top of the normal inc_dirs.
-        db_inc_paths = [
-            '/usr/include/db4',
-            '/usr/local/include/db4',
-            '/opt/sfw/include/db4',
-            '/usr/include/db3',
-            '/usr/local/include/db3',
-            '/opt/sfw/include/db3',
-            # Fink defaults (http://fink.sourceforge.net/)
-            '/sw/include/db4',
-            '/sw/include/db3',
-        ]
-        # 4.x minor number specific paths
-        for x in gen_db_minor_ver_nums(4):
-            db_inc_paths.append('/usr/include/db4%d' % x)
-            db_inc_paths.append('/usr/include/db4.%d' % x)
-            db_inc_paths.append('/usr/local/BerkeleyDB.4.%d/include' % x)
-            db_inc_paths.append('/usr/local/include/db4%d' % x)
-            db_inc_paths.append('/pkg/db-4.%d/include' % x)
-            db_inc_paths.append('/opt/db-4.%d/include' % x)
-            # MacPorts default (http://www.macports.org/)
-            db_inc_paths.append('/opt/local/include/db4%d' % x)
-        # 3.x minor number specific paths
-        for x in gen_db_minor_ver_nums(3):
-            db_inc_paths.append('/usr/include/db3%d' % x)
-            db_inc_paths.append('/usr/local/BerkeleyDB.3.%d/include' % x)
-            db_inc_paths.append('/usr/local/include/db3%d' % x)
-            db_inc_paths.append('/pkg/db-3.%d/include' % x)
-            db_inc_paths.append('/opt/db-3.%d/include' % x)
-
-        # Add some common subdirectories for Sleepycat DB to the list,
-        # based on the standard include directories. This way DB3/4 gets
-        # picked up when it is installed in a non-standard prefix and
-        # the user has added that prefix into inc_dirs.
-        std_variants = []
-        for dn in inc_dirs:
-            std_variants.append(os.path.join(dn, 'db3'))
-            std_variants.append(os.path.join(dn, 'db4'))
-            for x in gen_db_minor_ver_nums(4):
-                std_variants.append(os.path.join(dn, "db4%d"%x))
-                std_variants.append(os.path.join(dn, "db4.%d"%x))
-            for x in gen_db_minor_ver_nums(3):
-                std_variants.append(os.path.join(dn, "db3%d"%x))
-                std_variants.append(os.path.join(dn, "db3.%d"%x))
-
-        db_inc_paths = std_variants + db_inc_paths
-        db_inc_paths = [p for p in db_inc_paths if os.path.exists(p)]
-
-        db_ver_inc_map = {}
-
-        if sys.platform == 'darwin':
-            sysroot = macosx_sdk_root()
-
-        class db_found(Exception): pass
-        try:
-            # See whether there is a Sleepycat header in the standard
-            # search path.
-            for d in inc_dirs + db_inc_paths:
-                f = os.path.join(d, "db.h")
-
-                if sys.platform == 'darwin' and is_macosx_sdk_path(d):
-                    f = os.path.join(sysroot, d[1:], "db.h")
-
-                if db_setup_debug: print "db: looking for db.h in", f
-                if os.path.exists(f):
-                    f = open(f).read()
-                    m = re.search(r"#define\WDB_VERSION_MAJOR\W(\d+)", f)
-                    if m:
-                        db_major = int(m.group(1))
-                        m = re.search(r"#define\WDB_VERSION_MINOR\W(\d+)", f)
-                        db_minor = int(m.group(1))
-                        db_ver = (db_major, db_minor)
-
-                        # Avoid 4.6 prior to 4.6.21 due to a BerkeleyDB bug
-                        if db_ver == (4, 6):
-                            m = re.search(r"#define\WDB_VERSION_PATCH\W(\d+)", f)
-                            db_patch = int(m.group(1))
-                            if db_patch < 21:
-                                print "db.h:", db_ver, "patch", db_patch,
-                                print "being ignored (4.6.x must be >= 4.6.21)"
-                                continue
-
-                        if ( (db_ver not in db_ver_inc_map) and
-                            allow_db_ver(db_ver) ):
-                            # save the include directory with the db.h version
-                            # (first occurrence only)
-                            db_ver_inc_map[db_ver] = d
-                            if db_setup_debug:
-                                print "db.h: found", db_ver, "in", d
-                        else:
-                            # we already found a header for this library version
-                            if db_setup_debug: print "db.h: ignoring", d
-                    else:
-                        # ignore this header, it didn't contain a version number
-                        if db_setup_debug:
-                            print "db.h: no version number version in", d
-
-            db_found_vers = db_ver_inc_map.keys()
-            db_found_vers.sort()
-
-            while db_found_vers:
-                db_ver = db_found_vers.pop()
-                db_incdir = db_ver_inc_map[db_ver]
-
-                # check lib directories parallel to the location of the header
-                db_dirs_to_check = [
-                    db_incdir.replace("include", 'lib64'),
-                    db_incdir.replace("include", 'lib'),
-                    db_incdir.replace("include", 'lib/x86_64-linux-gnu')
-                ]
-
-                if sys.platform != 'darwin':
-                    db_dirs_to_check = filter(os.path.isdir, db_dirs_to_check)
-
-                else:
-                    # Same as other branch, but takes OSX SDK into account
-                    tmp = []
-                    for dn in db_dirs_to_check:
-                        if is_macosx_sdk_path(dn):
-                            if os.path.isdir(os.path.join(sysroot, dn[1:])):
-                                tmp.append(dn)
-                        else:
-                            if os.path.isdir(dn):
-                                tmp.append(dn)
-                    db_dirs_to_check = tmp
-
-                # Look for a version-specific db-X.Y before an ambiguous dbX
-                # XXX should we -ever- look for a dbX name?  Do any
-                # systems really not name their library by version and
-                # symlink to more general names?
-                for dblib in (('db-%d.%d' % db_ver),
-                              ('db%d%d' % db_ver),
-                              ('db%d' % db_ver[0])):
-                    dblib_file = self.compiler.find_library_file(
-                                    db_dirs_to_check + lib_dirs, dblib )
-                    if dblib_file:
-                        dblib_dir = [ os.path.abspath(os.path.dirname(dblib_file)) ]
-                        raise db_found
-                    else:
-                        if db_setup_debug: print "db lib: ", dblib, "not found"
-
-        except db_found:
-            if db_setup_debug:
-                print "bsddb using BerkeleyDB lib:", db_ver, dblib
-                print "bsddb lib dir:", dblib_dir, " inc dir:", db_incdir
-            db_incs = [db_incdir]
-            dblibs = [dblib]
-            # We add the runtime_library_dirs argument because the
-            # BerkeleyDB lib we're linking against often isn't in the
-            # system dynamic library search path.  This is usually
-            # correct and most trouble free, but may cause problems in
-            # some unusual system configurations (e.g. the directory
-            # is on an NFS server that goes away).
-            exts.append(Extension('_bsddb', ['_bsddb.c'],
-                                  depends = ['bsddb.h'],
-                                  library_dirs=dblib_dir,
-                                  runtime_library_dirs=dblib_dir,
-                                  include_dirs=db_incs,
-                                  libraries=dblibs))
-        else:
-            if db_setup_debug: print "db: no appropriate library found"
-            db_incs = None
-            dblibs = []
-            dblib_dir = None
-            missing.append('_bsddb')
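-
-        # A minimal standalone sketch of the header probe above, assuming a
-        # hypothetical path some_db_h (illustrative only):
-        #
-        #   data = open(some_db_h).read()
-        #   major = re.search(r"#define\WDB_VERSION_MAJOR\W(\d+)", data)
-        #   minor = re.search(r"#define\WDB_VERSION_MINOR\W(\d+)", data)
-        #   if major and minor:
-        #       db_ver = (int(major.group(1)), int(minor.group(1)))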
-
-        # The sqlite interface
-        sqlite_setup_debug = False   # verbose debug prints from this script?
-
-        # We hunt for #define SQLITE_VERSION "n.n.n"
-        # We need to find >= sqlite version 3.0.8
-        sqlite_incdir = sqlite_libdir = None
-        sqlite_inc_paths = [ '/usr/include',
-                             '/usr/include/sqlite',
-                             '/usr/include/sqlite3',
-                             '/usr/local/include',
-                             '/usr/local/include/sqlite',
-                             '/usr/local/include/sqlite3',
-                           ]
-        MIN_SQLITE_VERSION_NUMBER = (3, 0, 8)
-        MIN_SQLITE_VERSION = ".".join([str(x)
-                                    for x in MIN_SQLITE_VERSION_NUMBER])
-
-        # Scan the default include directories before the SQLite specific
-        # ones. This allows one to override the copy of sqlite on OSX,
-        # where /usr/include contains an old version of sqlite.
-        if sys.platform == 'darwin':
-            sysroot = macosx_sdk_root()
-
-        for d in inc_dirs + sqlite_inc_paths:
-            f = os.path.join(d, "sqlite3.h")
-
-            if sys.platform == 'darwin' and is_macosx_sdk_path(d):
-                f = os.path.join(sysroot, d[1:], "sqlite3.h")
-
-            if os.path.exists(f):
-                if sqlite_setup_debug: print "sqlite: found %s"%f
-                incf = open(f).read()
-                m = re.search(
-                    r'\s*.*#\s*.*define\s.*SQLITE_VERSION\W*"(.*)"', incf)
-                if m:
-                    sqlite_version = m.group(1)
-                    sqlite_version_tuple = tuple([int(x)
-                                        for x in sqlite_version.split(".")])
-                    if sqlite_version_tuple >= MIN_SQLITE_VERSION_NUMBER:
-                        # we win!
-                        if sqlite_setup_debug:
-                            print "%s/sqlite3.h: version %s"%(d, sqlite_version)
-                        sqlite_incdir = d
-                        break
-                    else:
-                        if sqlite_setup_debug:
-                            print "%s: version %d is too old, need >= %s"%(d,
-                                        sqlite_version, MIN_SQLITE_VERSION)
-                elif sqlite_setup_debug:
-                    print "sqlite: %s had no SQLITE_VERSION"%(f,)
-
-        if sqlite_incdir:
-            sqlite_dirs_to_check = [
-                os.path.join(sqlite_incdir, '..', 'lib64'),
-                os.path.join(sqlite_incdir, '..', 'lib'),
-                os.path.join(sqlite_incdir, '..', 'lib/x86_64-linux-gnu'),
-                os.path.join(sqlite_incdir, '..', '..', 'lib64'),
-                os.path.join(sqlite_incdir, '..', '..', 'lib'),
-            ]
-            sqlite_libfile = self.compiler.find_library_file(
-                                sqlite_dirs_to_check + lib_dirs, 'sqlite3')
-            if sqlite_libfile:
-                sqlite_libdir = [os.path.abspath(os.path.dirname(sqlite_libfile))]
-
-        if sqlite_incdir and sqlite_libdir:
-            sqlite_srcs = ['_sqlite/cache.c',
-                '_sqlite/connection.c',
-                '_sqlite/cursor.c',
-                '_sqlite/microprotocols.c',
-                '_sqlite/module.c',
-                '_sqlite/prepare_protocol.c',
-                '_sqlite/row.c',
-                '_sqlite/statement.c',
-                '_sqlite/util.c', ]
-
-            sqlite_defines = []
-            if sys.platform != "win32":
-                sqlite_defines.append(('MODULE_NAME', '"sqlite3"'))
-            else:
-                sqlite_defines.append(('MODULE_NAME', '\\"sqlite3\\"'))
-
-            # Comment this out if you want the sqlite3 module to be able to load extensions.
-            sqlite_defines.append(("SQLITE_OMIT_LOAD_EXTENSION", "1"))
-
-            if sys.platform == 'darwin':
-                # In every directory on the search path, look for a dynamic
-                # library and then a static library, instead of first looking
-                # for dynamic libraries on the entire path.
-                # This way a statically linked custom sqlite gets picked up
-                # before the dynamic library in /usr/lib.
-                sqlite_extra_link_args = ('-Wl,-search_paths_first',)
-            else:
-                sqlite_extra_link_args = ()
-
-            exts.append(Extension('_sqlite3', sqlite_srcs,
-                                  define_macros=sqlite_defines,
-                                  include_dirs=["Modules/_sqlite",
-                                                sqlite_incdir],
-                                  library_dirs=sqlite_libdir,
-                                  runtime_library_dirs=sqlite_libdir,
-                                  extra_link_args=sqlite_extra_link_args,
-                                  libraries=["sqlite3",]))
-        else:
-            missing.append('_sqlite3')
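-
-        # The version gate above boils down to a tuple comparison; e.g. a
-        # header declaring SQLITE_VERSION "3.7.3" (hypothetical) gives:
-        #
-        #   >>> tuple(int(x) for x in "3.7.3".split(".")) >= (3, 0, 8)
-        #   True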
-
-        # Look for Berkeley db 1.85.   Note that it is built as a different
-        # module name so it can be included even when later versions are
-        # available.  A very restrictive search is performed to avoid
-        # accidentally building this module with a later version of the
-        # underlying db library.  Many BSD-ish Unixes incorporate db 1.85
-        # symbols into libc and place the include file in /usr/include.
-        #
-        # If the better bsddb library can be built (db_incs is defined)
-        # we do not build this one.  Otherwise this build will pick up
-        # the more recent berkeleydb's db.h file first in the include path
-        # when attempting to compile and it will fail.
-        f = "/usr/include/db.h"
-
-        if sys.platform == 'darwin':
-            if is_macosx_sdk_path(f):
-                sysroot = macosx_sdk_root()
-                f = os.path.join(sysroot, f[1:])
-
-        if os.path.exists(f) and not db_incs:
-            data = open(f).read()
-            m = re.search(r"#s*define\s+HASHVERSION\s+2\s*", data)
-            if m is not None:
-                # bingo - old version used hash file format version 2
-                ### XXX this should be fixed to not be platform-dependent,
-                ### but I don't have direct access to an osf1 platform and
-                ### the search seemed to get muffed there somehow
-                libraries = platform == "osf1" and ['db'] or None
-                if libraries is not None:
-                    exts.append(Extension('bsddb185', ['bsddbmodule.c'],
-                                          libraries=libraries))
-                else:
-                    exts.append(Extension('bsddb185', ['bsddbmodule.c']))
-            else:
-                missing.append('bsddb185')
-        else:
-            missing.append('bsddb185')
-
-        dbm_order = ['gdbm']
-        # The standard Unix dbm module:
-        if platform not in ['cygwin']:
-            config_args = [arg.strip("'")
-                           for arg in sysconfig.get_config_var("CONFIG_ARGS").split()]
-            dbm_args = [arg for arg in config_args
-                        if arg.startswith('--with-dbmliborder=')]
-            if dbm_args:
-                dbm_order = [arg.split('=')[-1] for arg in dbm_args][-1].split(":")
-            else:
-                dbm_order = "ndbm:gdbm:bdb".split(":")
-            dbmext = None
-            for cand in dbm_order:
-                if cand == "ndbm":
-                    if find_file("ndbm.h", inc_dirs, []) is not None:
-                        # Some systems have -lndbm, others don't
-                        if self.compiler.find_library_file(lib_dirs,
-                                                               'ndbm'):
-                            ndbm_libs = ['ndbm']
-                        else:
-                            ndbm_libs = []
-                        print "building dbm using ndbm"
-                        dbmext = Extension('dbm', ['dbmmodule.c'],
-                                           define_macros=[
-                                               ('HAVE_NDBM_H',None),
-                                               ],
-                                           libraries=ndbm_libs)
-                        break
-
-                elif cand == "gdbm":
-                    if self.compiler.find_library_file(lib_dirs, 'gdbm'):
-                        gdbm_libs = ['gdbm']
-                        if self.compiler.find_library_file(lib_dirs,
-                                                               'gdbm_compat'):
-                            gdbm_libs.append('gdbm_compat')
-                        if find_file("gdbm/ndbm.h", inc_dirs, []) is not None:
-                            print "building dbm using gdbm"
-                            dbmext = Extension(
-                                'dbm', ['dbmmodule.c'],
-                                define_macros=[
-                                    ('HAVE_GDBM_NDBM_H', None),
-                                    ],
-                                libraries = gdbm_libs)
-                            break
-                        if find_file("gdbm-ndbm.h", inc_dirs, []) is not None:
-                            print "building dbm using gdbm"
-                            dbmext = Extension(
-                                'dbm', ['dbmmodule.c'],
-                                define_macros=[
-                                    ('HAVE_GDBM_DASH_NDBM_H', None),
-                                    ],
-                                libraries = gdbm_libs)
-                            break
-                elif cand == "bdb":
-                    if db_incs is not None:
-                        print "building dbm using bdb"
-                        dbmext = Extension('dbm', ['dbmmodule.c'],
-                                           library_dirs=dblib_dir,
-                                           runtime_library_dirs=dblib_dir,
-                                           include_dirs=db_incs,
-                                           define_macros=[
-                                               ('HAVE_BERKDB_H', None),
-                                               ('DB_DBM_HSEARCH', None),
-                                               ],
-                                           libraries=dblibs)
-                        break
-            if dbmext is not None:
-                exts.append(dbmext)
-            else:
-                missing.append('dbm')
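-
-        # For example (a hypothetical configure invocation), building with
-        # --with-dbmliborder=gdbm:ndbm would make dbm_order == ['gdbm',
-        # 'ndbm'], so the bdb candidate above would never be tried.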
-
-        # Anthony Baxter's gdbm module.  GNU dbm(3) will require -lgdbm:
-        if ('gdbm' in dbm_order and
-            self.compiler.find_library_file(lib_dirs, 'gdbm')):
-            exts.append( Extension('gdbm', ['gdbmmodule.c'],
-                                   libraries = ['gdbm'] ) )
-        else:
-            missing.append('gdbm')
-
-        # Unix-only modules
-        if platform not in ['win32']:
-            # Steen Lumholt's termios module
-            exts.append( Extension('termios', ['termios.c']) )
-            # Jeremy Hylton's rlimit interface
-            if platform not in ['atheos']:
-                exts.append( Extension('resource', ['resource.c']) )
-            else:
-                missing.append('resource')
-
-            # Sun yellow pages. Some systems have the functions in libc.
-            if (platform not in ['cygwin', 'atheos', 'qnx6'] and
-                find_file('rpcsvc/yp_prot.h', inc_dirs, []) is not None):
-                if (self.compiler.find_library_file(lib_dirs, 'nsl')):
-                    libs = ['nsl']
-                else:
-                    libs = []
-                exts.append( Extension('nis', ['nismodule.c'],
-                                       libraries = libs) )
-            else:
-                missing.append('nis')
-        else:
-            missing.extend(['nis', 'resource', 'termios'])
-
-        # Curses support, requiring the System V version of curses, often
-        # provided by the ncurses library.
-        panel_library = 'panel'
-        if curses_library.startswith('ncurses'):
-            if curses_library == 'ncursesw':
-                # Bug 1464056: If _curses.so links with ncursesw,
-                # _curses_panel.so must link with panelw.
-                panel_library = 'panelw'
-            curses_libs = [curses_library]
-            exts.append( Extension('_curses', ['_cursesmodule.c'],
-                                   libraries = curses_libs) )
-        elif curses_library == 'curses' and platform != 'darwin':
-            # OSX has an old Berkeley curses, not good enough for
-            # the _curses module.
-            if (self.compiler.find_library_file(lib_dirs, 'terminfo')):
-                curses_libs = ['curses', 'terminfo']
-            elif (self.compiler.find_library_file(lib_dirs, 'termcap')):
-                curses_libs = ['curses', 'termcap']
-            else:
-                curses_libs = ['curses']
-
-            exts.append( Extension('_curses', ['_cursesmodule.c'],
-                                   libraries = curses_libs) )
-        else:
-            missing.append('_curses')
-
-        # If the curses module is enabled, check for the panel module
-        if (module_enabled(exts, '_curses') and
-            self.compiler.find_library_file(lib_dirs, panel_library)):
-            exts.append( Extension('_curses_panel', ['_curses_panel.c'],
-                                   libraries = [panel_library] + curses_libs) )
-        else:
-            missing.append('_curses_panel')
-
-        # Andrew Kuchling's zlib module.  Note that some versions of zlib
-        # 1.1.3 have security problems.  See CERT Advisory CA-2002-07:
-        # http://www.cert.org/advisories/CA-2002-07.html
-        #
-        # zlib 1.1.4 is fixed, but at least one vendor (RedHat) has decided to
-        # patch its zlib 1.1.3 package instead of upgrading to 1.1.4.  For
-        # now, we still accept 1.1.3, because we think it's difficult to
-        # exploit this in Python, and we'd rather make it RedHat's problem
-        # than our problem <wink>.
-        #
-        # You can upgrade zlib to version 1.1.4 yourself by going to
-        # http://www.gzip.org/zlib/
-        zlib_inc = find_file('zlib.h', [], inc_dirs)
-        have_zlib = False
-        if zlib_inc is not None:
-            zlib_h = zlib_inc[0] + '/zlib.h'
-            version = '"0.0.0"'
-            version_req = '"1.1.3"'
-            fp = open(zlib_h)
-            while 1:
-                line = fp.readline()
-                if not line:
-                    break
-                if line.startswith('#define ZLIB_VERSION'):
-                    version = line.split()[2]
-                    break
-            if version >= version_req:
-                if (self.compiler.find_library_file(lib_dirs, 'z')):
-                    if sys.platform == "darwin":
-                        zlib_extra_link_args = ('-Wl,-search_paths_first',)
-                    else:
-                        zlib_extra_link_args = ()
-                    exts.append( Extension('zlib', ['zlibmodule.c'],
-                                           libraries = ['z'],
-                                           extra_link_args = zlib_extra_link_args))
-                    have_zlib = True
-                else:
-                    missing.append('zlib')
-            else:
-                missing.append('zlib')
-        else:
-            missing.append('zlib')
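-
-        # Note that the gate above compares the quoted version strings
-        # lexicographically, which happens to work for zlib 1.1.x but would
-        # misorder a hypothetical "1.10.0" against "1.2.0".  A tuple-based
-        # sketch (illustrative, not what this script does):
-        #
-        #   >>> tuple(int(x) for x in '"1.10.0"'.strip('"').split(".")) >= (1, 1, 3)
-        #   True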
-
-        # Helper module for various ascii-encoders.  Uses zlib for an optimized
-        # crc32 if we have it.  Otherwise binascii uses its own.
-        if have_zlib:
-            extra_compile_args = ['-DUSE_ZLIB_CRC32']
-            libraries = ['z']
-            extra_link_args = zlib_extra_link_args
-        else:
-            extra_compile_args = []
-            libraries = []
-            extra_link_args = []
-        exts.append( Extension('binascii', ['binascii.c'],
-                               extra_compile_args = extra_compile_args,
-                               libraries = libraries,
-                               extra_link_args = extra_link_args) )
-
-        # Gustavo Niemeyer's bz2 module.
-        if (self.compiler.find_library_file(lib_dirs, 'bz2')):
-            if sys.platform == "darwin":
-                bz2_extra_link_args = ('-Wl,-search_paths_first',)
-            else:
-                bz2_extra_link_args = ()
-            exts.append( Extension('bz2', ['bz2module.c'],
-                                   libraries = ['bz2'],
-                                   extra_link_args = bz2_extra_link_args) )
-        else:
-            missing.append('bz2')
-
-        # Interface to the Expat XML parser
-        #
-        # Expat was written by James Clark and is now maintained by a group of
-        # developers on SourceForge; see www.libexpat.org for more information.
-        # The pyexpat module was written by Paul Prescod after a prototype by
-        # Jack Jansen.  The Expat source is included in Modules/expat/.  Usage
-        # of a system shared libexpat.so is possible with the
-        # --with-system-expat configure option.
-        #
-        if '--with-system-expat' in sysconfig.get_config_var("CONFIG_ARGS"):
-            expat_inc = []
-            define_macros = []
-            expat_lib = ['expat']
-            expat_sources = []
-        else:
-            expat_inc = [os.path.join(os.getcwd(), srcdir, 'Modules', 'expat')]
-            define_macros = [
-                ('HAVE_EXPAT_CONFIG_H', '1'),
-            ]
-            expat_lib = []
-            expat_sources = ['expat/xmlparse.c',
-                             'expat/xmlrole.c',
-                             'expat/xmltok.c']
-
-        exts.append(Extension('pyexpat',
-                              define_macros = define_macros,
-                              include_dirs = expat_inc,
-                              libraries = expat_lib,
-                              sources = ['pyexpat.c'] + expat_sources
-                              ))
-
-        # Fredrik Lundh's cElementTree module.  Note that this also
-        # uses expat (via the CAPI hook in pyexpat).
-
-        if os.path.isfile(os.path.join(srcdir, 'Modules', '_elementtree.c')):
-            define_macros.append(('USE_PYEXPAT_CAPI', None))
-            exts.append(Extension('_elementtree',
-                                  define_macros = define_macros,
-                                  include_dirs = expat_inc,
-                                  libraries = expat_lib,
-                                  sources = ['_elementtree.c'],
-                                  ))
-        else:
-            missing.append('_elementtree')
-
-        # Hye-Shik Chang's CJKCodecs modules.
-        if have_unicode:
-            exts.append(Extension('_multibytecodec',
-                                  ['cjkcodecs/multibytecodec.c']))
-            for loc in ('kr', 'jp', 'cn', 'tw', 'hk', 'iso2022'):
-                exts.append(Extension('_codecs_%s' % loc,
-                                      ['cjkcodecs/_codecs_%s.c' % loc]))
-        else:
-            missing.append('_multibytecodec')
-            for loc in ('kr', 'jp', 'cn', 'tw', 'hk', 'iso2022'):
-                missing.append('_codecs_%s' % loc)
-
-        # Dynamic loading module
-        if sys.maxint == 0x7fffffff:
-            # This requires sizeof(int) == sizeof(long) == sizeof(char*)
-            dl_inc = find_file('dlfcn.h', [], inc_dirs)
-            if (dl_inc is not None) and (platform not in ['atheos']):
-                exts.append( Extension('dl', ['dlmodule.c']) )
-            else:
-                missing.append('dl')
-        else:
-            missing.append('dl')
-
-        # Thomas Heller's _ctypes module
-        self.detect_ctypes(inc_dirs, lib_dirs)
-
-        # Richard Oudkerk's multiprocessing module
-        if platform == 'win32':             # Windows
-            macros = dict()
-            libraries = ['ws2_32']
-
-        elif platform == 'darwin':          # Mac OSX
-            macros = dict()
-            libraries = []
-
-        elif platform == 'cygwin':          # Cygwin
-            macros = dict()
-            libraries = []
-
-        elif platform in ('freebsd4', 'freebsd5', 'freebsd6', 'freebsd7', 'freebsd8'):
-            # FreeBSD's P1003.1b semaphore support is very experimental
-            # and has many known problems. (as of June 2008)
-            macros = dict()
-            libraries = []
-
-        elif platform.startswith('openbsd'):
-            macros = dict()
-            libraries = []
-
-        elif platform.startswith('netbsd'):
-            macros = dict()
-            libraries = []
-
-        else:                                   # Linux and other unices
-            macros = dict()
-            libraries = ['rt']
-
-        if platform == 'win32':
-            multiprocessing_srcs = [ '_multiprocessing/multiprocessing.c',
-                                     '_multiprocessing/semaphore.c',
-                                     '_multiprocessing/pipe_connection.c',
-                                     '_multiprocessing/socket_connection.c',
-                                     '_multiprocessing/win32_functions.c'
-                                   ]
-
-        else:
-            multiprocessing_srcs = [ '_multiprocessing/multiprocessing.c',
-                                     '_multiprocessing/socket_connection.c'
-                                   ]
-            if (sysconfig.get_config_var('HAVE_SEM_OPEN') and not
-                sysconfig.get_config_var('POSIX_SEMAPHORES_NOT_ENABLED')):
-                multiprocessing_srcs.append('_multiprocessing/semaphore.c')
-
-        if sysconfig.get_config_var('WITH_THREAD'):
-            exts.append ( Extension('_multiprocessing', multiprocessing_srcs,
-                                    define_macros=macros.items(),
-                                    include_dirs=["Modules/_multiprocessing"]))
-        else:
-            missing.append('_multiprocessing')
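-
-        # On Linux, for example, the branches above link against librt for
-        # POSIX semaphores and compile semaphore.c only when sysconfig
-        # reports HAVE_SEM_OPEN (and POSIX semaphores are not flagged as
-        # disabled).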
-
-        # End multiprocessing
-
-
-        # Platform-specific libraries
-        if platform == 'linux2':
-            # Linux-specific modules
-            exts.append( Extension('linuxaudiodev', ['linuxaudiodev.c']) )
-        else:
-            missing.append('linuxaudiodev')
-
-        if (platform in ('linux2', 'freebsd4', 'freebsd5', 'freebsd6',
-                        'freebsd7', 'freebsd8')
-            or platform.startswith("gnukfreebsd")):
-            exts.append( Extension('ossaudiodev', ['ossaudiodev.c']) )
-        else:
-            missing.append('ossaudiodev')
-
-        if platform == 'sunos5':
-            # SunOS specific modules
-            exts.append( Extension('sunaudiodev', ['sunaudiodev.c']) )
-        else:
-            missing.append('sunaudiodev')
-
-        if platform == 'darwin':
-            # _scproxy
-            exts.append(Extension("_scproxy", [os.path.join(srcdir, "Mac/Modules/_scproxy.c")],
-                extra_link_args= [
-                    '-framework', 'SystemConfiguration',
-                    '-framework', 'CoreFoundation'
-                ]))
-
-
-        if platform == 'darwin' and ("--disable-toolbox-glue" not in
-                sysconfig.get_config_var("CONFIG_ARGS")):
-
-            if int(os.uname()[2].split('.')[0]) >= 8:
-                # We're on Mac OS X 10.4 or later, the compiler should
-                # support '-Wno-deprecated-declarations'. This will
-                # suppress deprecation warnings for the Carbon extensions,
-                # which wrap the Carbon APIs, including the parts that
-                # are deprecated.
-                carbon_extra_compile_args = ['-Wno-deprecated-declarations']
-            else:
-                carbon_extra_compile_args = []
-
-            # Mac OS X specific modules.
-            def macSrcExists(name1, name2=''):
-                if not name1:
-                    return None
-                names = (name1,)
-                if name2:
-                    names = (name1, name2)
-                path = os.path.join(srcdir, 'Mac', 'Modules', *names)
-                return os.path.exists(path)
-
-            def addMacExtension(name, kwds, extra_srcs=[]):
-                dirname = ''
-                if name[0] == '_':
-                    dirname = name[1:].lower()
-                cname = name + '.c'
-                cmodulename = name + 'module.c'
-                # Check for NNN.c, NNNmodule.c, _nnn/NNN.c, _nnn/NNNmodule.c
-                if macSrcExists(cname):
-                    srcs = [cname]
-                elif macSrcExists(cmodulename):
-                    srcs = [cmodulename]
-                elif macSrcExists(dirname, cname):
-                    # XXX(nnorwitz): If all the names ended with module, we
-                    # wouldn't need this condition.  ibcarbon is the only one.
-                    srcs = [os.path.join(dirname, cname)]
-                elif macSrcExists(dirname, cmodulename):
-                    srcs = [os.path.join(dirname, cmodulename)]
-                else:
-                    raise RuntimeError("%s not found" % name)
-
-                # Here's the whole point:  add the extension with sources
-                exts.append(Extension(name, srcs + extra_srcs, **kwds))
-
-            # Core Foundation
-            core_kwds = {'extra_compile_args': carbon_extra_compile_args,
-                         'extra_link_args': ['-framework', 'CoreFoundation'],
-                        }
-            addMacExtension('_CF', core_kwds, ['cf/pycfbridge.c'])
-            addMacExtension('autoGIL', core_kwds)
-
-
-
-            # Carbon
-            carbon_kwds = {'extra_compile_args': carbon_extra_compile_args,
-                           'extra_link_args': ['-framework', 'Carbon'],
-                          }
-            CARBON_EXTS = ['ColorPicker', 'gestalt', 'MacOS', 'Nav',
-                           'OSATerminology', 'icglue',
-                           # All these are in subdirs
-                           '_AE', '_AH', '_App', '_CarbonEvt', '_Cm', '_Ctl',
-                           '_Dlg', '_Drag', '_Evt', '_File', '_Folder', '_Fm',
-                           '_Help', '_Icn', '_IBCarbon', '_List',
-                           '_Menu', '_Mlte', '_OSA', '_Res', '_Qd', '_Qdoffs',
-                           '_Scrap', '_Snd', '_TE',
-                          ]
-            for name in CARBON_EXTS:
-                addMacExtension(name, carbon_kwds)
-
-            # Workaround for a bug in the version of gcc shipped with Xcode 3.
-            # The _Win extension should build just like the other Carbon extensions, but
-            # this actually results in a hard crash of the linker.
-            #
-            if '-arch ppc64' in cflags and '-arch ppc' in cflags:
-                win_kwds = {'extra_compile_args': carbon_extra_compile_args + ['-arch', 'i386', '-arch', 'ppc'],
-                               'extra_link_args': ['-framework', 'Carbon', '-arch', 'i386', '-arch', 'ppc'],
-                           }
-                addMacExtension('_Win', win_kwds)
-            else:
-                addMacExtension('_Win', carbon_kwds)
-
-
-            # Application Services & QuickTime
-            app_kwds = {'extra_compile_args': carbon_extra_compile_args,
-                        'extra_link_args': ['-framework','ApplicationServices'],
-                       }
-            addMacExtension('_Launch', app_kwds)
-            addMacExtension('_CG', app_kwds)
-
-            exts.append( Extension('_Qt', ['qt/_Qtmodule.c'],
-                        extra_compile_args=carbon_extra_compile_args,
-                        extra_link_args=['-framework', 'QuickTime',
-                                     '-framework', 'Carbon']) )
-
-
-        self.extensions.extend(exts)
-
-        # Call the method for detecting whether _tkinter can be compiled
-        self.detect_tkinter(inc_dirs, lib_dirs)
-
-        if '_tkinter' not in [e.name for e in self.extensions]:
-            missing.append('_tkinter')
-
-        return missing
-
-    def detect_tkinter_darwin(self, inc_dirs, lib_dirs):
-        # The _tkinter module, using frameworks. Since frameworks are quite
-        # different the UNIX search logic is not sharable.
-        from os.path import join, exists
-        framework_dirs = [
-            '/Library/Frameworks',
-            '/System/Library/Frameworks/',
-            join(os.getenv('HOME'), 'Library', 'Frameworks')
-        ]
-
-        sysroot = macosx_sdk_root()
-
-        # Find the directory that contains the Tcl.framework and Tk.framework
-        # bundles.
-        # XXX distutils should support -F!
-        for F in framework_dirs:
-            # both Tcl.framework and Tk.framework should be present
-
-
-            for fw in 'Tcl', 'Tk':
-                if is_macosx_sdk_path(F):
-                    if not exists(join(sysroot, F[1:], fw + '.framework')):
-                        break
-                else:
-                    if not exists(join(F, fw + '.framework')):
-                        break
-            else:
-                # ok, F is now a directory with both frameworks.  Continue
-                # building.
-                break
-        else:
-            # Tk and Tcl frameworks not found. Normal "unix" tkinter search
-            # will now resume.
-            return 0
-
-        # For 8.4a2, we must add -I options that point inside the Tcl and Tk
-        # frameworks. In a later release we should hopefully be able to pass
-        # the -F option to gcc, which specifies a framework lookup path.
-        #
-        include_dirs = [
-            join(F, fw + '.framework', H)
-            for fw in 'Tcl', 'Tk'
-            for H in 'Headers', 'Versions/Current/PrivateHeaders'
-        ]
-
-        # For 8.4a2, the X11 headers are not included. Rather than include a
-        # complicated search, this is a hard-coded path. It could bail out
-        # if X11 libs are not found...
-        include_dirs.append('/usr/X11R6/include')
-        frameworks = ['-framework', 'Tcl', '-framework', 'Tk']
-
-        # None of the existing framework builds of Tcl/Tk support 64-bit
-        # architectures.
-        cflags = sysconfig.get_config_vars('CFLAGS')[0]
-        archs = re.findall(r'-arch\s+(\w+)', cflags)
-
-        if is_macosx_sdk_path(F):
-            fp = os.popen("file %s/Tk.framework/Tk | grep 'for architecture'"%(os.path.join(sysroot, F[1:]),))
-        else:
-            fp = os.popen("file %s/Tk.framework/Tk | grep 'for architecture'"%(F,))
-
-        detected_archs = []
-        for ln in fp:
-            a = ln.split()[-1]
-            if a in archs:
-                detected_archs.append(a)
-        fp.close()
-
-        for a in detected_archs:
-            frameworks.append('-arch')
-            frameworks.append(a)
-
-        ext = Extension('_tkinter', ['_tkinter.c', 'tkappinit.c'],
-                        define_macros=[('WITH_APPINIT', 1)],
-                        include_dirs = include_dirs,
-                        libraries = [],
-                        extra_compile_args = frameworks[2:],
-                        extra_link_args = frameworks,
-                        )
-        self.extensions.append(ext)
-        return 1
-
-
-    def detect_tkinter(self, inc_dirs, lib_dirs):
-        # The _tkinter module.
-
-        # Rather than complicate the code below, detecting and building
-        # AquaTk is a separate method. Only one Tkinter will be built on
-        # Darwin - either AquaTk, if it is found, or X11 based Tk.
-        platform = self.get_platform()
-        ## PCMDI changes: look for the AQUA_CDAT env variable to decide
-        if os.environ.get("AQUA_CDAT", "no") == "yes":
-            if (platform == 'darwin' and
-                self.detect_tkinter_darwin(inc_dirs, lib_dirs)):
-                return
-        ## End of PCMDI changes (we just added the if test above)
-
-        # Assume we haven't found any of the libraries or include files
-        # The versions with dots are used on Unix, and the versions without
-        # dots on Windows, for detection by cygwin.
-        tcllib = tklib = tcl_includes = tk_includes = None
-        for version in ['8.6', '86', '8.5', '85', '8.4', '84', '8.3', '83',
-                        '8.2', '82', '8.1', '81', '8.0', '80']:
-            tklib = self.compiler.find_library_file(lib_dirs,
-                                                        'tk' + version)
-            tcllib = self.compiler.find_library_file(lib_dirs,
-                                                         'tcl' + version)
-            if tklib and tcllib:
-                # Exit the loop when we've found the Tcl/Tk libraries
-                break
-
-        # Now check for the header files
-        if tklib and tcllib:
-            # Check for the include files on Debian and {Free,Open}BSD, where
-            # they're put in /usr/include/{tcl,tk}X.Y
-            dotversion = version
-            if '.' not in dotversion and "bsd" in sys.platform.lower():
-                # OpenBSD and FreeBSD use Tcl/Tk library names like libtcl83.a,
-                # but the include subdirs are named like .../include/tcl8.3.
-                dotversion = dotversion[:-1] + '.' + dotversion[-1]
-            tcl_include_sub = []
-            tk_include_sub = []
-            for dir in inc_dirs:
-                tcl_include_sub += [dir + os.sep + "tcl" + dotversion]
-                tk_include_sub += [dir + os.sep + "tk" + dotversion]
-            tk_include_sub += tcl_include_sub
-            tcl_includes = find_file('tcl.h', inc_dirs, tcl_include_sub)
-            tk_includes = find_file('tk.h', inc_dirs, tk_include_sub)
-
-        if (tcllib is None or tklib is None or
-            tcl_includes is None or tk_includes is None):
-            self.announce("INFO: Can't locate Tcl/Tk libs and/or headers", 2)
-            return
-
-        # OK... everything seems to be present for Tcl/Tk.
-
-        include_dirs = [] ; libs = [] ; defs = [] ; added_lib_dirs = []
-        for dir in tcl_includes + tk_includes:
-            if dir not in include_dirs:
-                include_dirs.append(dir)
-
-        # Check for various platform-specific directories
-        if platform == 'sunos5':
-            include_dirs.append('/usr/openwin/include')
-            added_lib_dirs.append('/usr/openwin/lib')
-        elif os.path.exists('/usr/X11R6/include'):
-            include_dirs.append('/usr/X11R6/include')
-            added_lib_dirs.append('/usr/X11R6/lib64')
-            added_lib_dirs.append('/usr/X11R6/lib')
-        elif os.path.exists('/usr/X11R5/include'):
-            include_dirs.append('/usr/X11R5/include')
-            added_lib_dirs.append('/usr/X11R5/lib')
-        else:
-            # Assume default location for X11
-            include_dirs.append('/usr/X11/include')
-            added_lib_dirs.append('/usr/X11/lib')
-
-        # If Cygwin, then verify that X is installed before proceeding
-        if platform == 'cygwin':
-            x11_inc = find_file('X11/Xlib.h', [], include_dirs)
-            if x11_inc is None:
-                return
-
-        # Check for BLT extension
-        if self.compiler.find_library_file(lib_dirs + added_lib_dirs,
-                                               'BLT8.0'):
-            defs.append( ('WITH_BLT', 1) )
-            libs.append('BLT8.0')
-        elif self.compiler.find_library_file(lib_dirs + added_lib_dirs,
-                                                'BLT'):
-            defs.append( ('WITH_BLT', 1) )
-            libs.append('BLT')
-
-        # Add the Tcl/Tk libraries
-        libs.append('tk'+ version)
-        libs.append('tcl'+ version)
-
-        if platform in ['aix3', 'aix4']:
-            libs.append('ld')
-
-        # Finally, link with the X11 libraries (not appropriate on cygwin)
-        if platform != "cygwin":
-            libs.append('X11')
-
-        ext = Extension('_tkinter', ['_tkinter.c', 'tkappinit.c'],
-                        define_macros=[('WITH_APPINIT', 1)] + defs,
-                        include_dirs = include_dirs,
-                        libraries = libs,
-                        library_dirs = added_lib_dirs,
-                        )
-        self.extensions.append(ext)
-
-##         # Uncomment these lines if you want to play with xxmodule.c
-##         ext = Extension('xx', ['xxmodule.c'])
-##         self.extensions.append(ext)
-
-        # XXX handle these, but how to detect?
-        # *** Uncomment and edit for PIL (TkImaging) extension only:
-        #       -DWITH_PIL -I../Extensions/Imaging/libImaging  tkImaging.c \
-        # *** Uncomment and edit for TOGL extension only:
-        #       -DWITH_TOGL togl.c \
-        # *** Uncomment these for TOGL extension only:
-        #       -lGL -lGLU -lXext -lXmu \
-
-    def configure_ctypes_darwin(self, ext):
-        # Darwin (OS X) uses preconfigured files, in
-        # the Modules/_ctypes/libffi_osx directory.
-        srcdir = sysconfig.get_config_var('srcdir')
-        ffi_srcdir = os.path.abspath(os.path.join(srcdir, 'Modules',
-                                                  '_ctypes', 'libffi_osx'))
-        sources = [os.path.join(ffi_srcdir, p)
-                   for p in ['ffi.c',
-                             'x86/darwin64.S',
-                             'x86/x86-darwin.S',
-                             'x86/x86-ffi_darwin.c',
-                             'x86/x86-ffi64.c',
-                             'powerpc/ppc-darwin.S',
-                             'powerpc/ppc-darwin_closure.S',
-                             'powerpc/ppc-ffi_darwin.c',
-                             'powerpc/ppc64-darwin_closure.S',
-                             ]]
-
-        # Add .S (preprocessed assembly) to C compiler source extensions.
-        self.compiler.src_extensions.append('.S')
-
-        include_dirs = [os.path.join(ffi_srcdir, 'include'),
-                        os.path.join(ffi_srcdir, 'powerpc')]
-        ext.include_dirs.extend(include_dirs)
-        ext.sources.extend(sources)
-        return True
-
-    def configure_ctypes(self, ext):
-        if not self.use_system_libffi:
-            if sys.platform == 'darwin':
-                return self.configure_ctypes_darwin(ext)
-
-            srcdir = sysconfig.get_config_var('srcdir')
-            ffi_builddir = os.path.join(self.build_temp, 'libffi')
-            ffi_srcdir = os.path.abspath(os.path.join(srcdir, 'Modules',
-                                         '_ctypes', 'libffi'))
-            ffi_configfile = os.path.join(ffi_builddir, 'fficonfig.py')
-
-            from distutils.dep_util import newer_group
-
-            config_sources = [os.path.join(ffi_srcdir, fname)
-                              for fname in os.listdir(ffi_srcdir)
-                              if os.path.isfile(os.path.join(ffi_srcdir, fname))]
-            if self.force or newer_group(config_sources,
-                                         ffi_configfile):
-                from distutils.dir_util import mkpath
-                mkpath(ffi_builddir)
-                config_args = []
-
-                # Pass empty CFLAGS because we'll just append the resulting
-                # CFLAGS to Python's; -g or -O2 is to be avoided.
-                cmd = "cd %s && env CFLAGS='' '%s/configure' %s" \
-                      % (ffi_builddir, ffi_srcdir, " ".join(config_args))
-
-                res = os.system(cmd)
-                if res or not os.path.exists(ffi_configfile):
-                    print "Failed to configure _ctypes module"
-                    return False
-
-            fficonfig = {}
-            with open(ffi_configfile) as f:
-                exec f in fficonfig
-
-            # Add .S (preprocessed assembly) to C compiler source extensions.
-            self.compiler.src_extensions.append('.S')
-
-            include_dirs = [os.path.join(ffi_builddir, 'include'),
-                            ffi_builddir,
-                            os.path.join(ffi_srcdir, 'src')]
-            extra_compile_args = fficonfig['ffi_cflags'].split()
-
-            ext.sources.extend(os.path.join(ffi_srcdir, f) for f in
-                               fficonfig['ffi_sources'])
-            ext.include_dirs.extend(include_dirs)
-            ext.extra_compile_args.extend(extra_compile_args)
-        return True
-
-    def detect_ctypes(self, inc_dirs, lib_dirs):
-        self.use_system_libffi = False
-        include_dirs = []
-        extra_compile_args = []
-        extra_link_args = []
-        sources = ['_ctypes/_ctypes.c',
-                   '_ctypes/callbacks.c',
-                   '_ctypes/callproc.c',
-                   '_ctypes/stgdict.c',
-                   '_ctypes/cfield.c']
-        depends = ['_ctypes/ctypes.h']
-
-        if sys.platform == 'darwin':
-            sources.append('_ctypes/malloc_closure.c')
-            sources.append('_ctypes/darwin/dlfcn_simple.c')
-            extra_compile_args.append('-DMACOSX')
-            include_dirs.append('_ctypes/darwin')
-# XXX Is this still needed?
-##            extra_link_args.extend(['-read_only_relocs', 'warning'])
-
-        elif sys.platform == 'sunos5':
-            # XXX This shouldn't be necessary; it appears that some
-            # of the assembler code is non-PIC (i.e. it has relocations
-            # when it shouldn't. The proper fix would be to rewrite
-            # the assembler code to be PIC.
-            # This only works with GCC; the Sun compiler likely refuses
-            # this option. If you want to compile ctypes with the Sun
-            # compiler, please research a proper solution, instead of
-            # finding some -z option for the Sun compiler.
-            extra_link_args.append('-mimpure-text')
-
-        elif sys.platform.startswith('hp-ux'):
-            extra_link_args.append('-fPIC')
-
-        ext = Extension('_ctypes',
-                        include_dirs=include_dirs,
-                        extra_compile_args=extra_compile_args,
-                        extra_link_args=extra_link_args,
-                        libraries=[],
-                        sources=sources,
-                        depends=depends)
-        ext_test = Extension('_ctypes_test',
-                             sources=['_ctypes/_ctypes_test.c'])
-        self.extensions.extend([ext, ext_test])
-
-        if '--with-system-ffi' not in sysconfig.get_config_var("CONFIG_ARGS"):
-            return
-
-        if sys.platform == 'darwin':
-            # OS X 10.5 comes with libffi.dylib; the include files are
-            # in /usr/include/ffi
-            inc_dirs.append('/usr/include/ffi')
-
-        ffi_inc = [sysconfig.get_config_var("LIBFFI_INCLUDEDIR")]
-        if not ffi_inc or ffi_inc[0] == '':
-            ffi_inc = find_file('ffi.h', [], inc_dirs)
-        if ffi_inc is not None:
-            ffi_h = ffi_inc[0] + '/ffi.h'
-            fp = open(ffi_h)
-            while 1:
-                line = fp.readline()
-                if not line:
-                    ffi_inc = None
-                    break
-                if line.startswith('#define LIBFFI_H'):
-                    break
-        ffi_lib = None
-        if ffi_inc is not None:
-            for lib_name in ('ffi_convenience', 'ffi_pic', 'ffi'):
-                if (self.compiler.find_library_file(lib_dirs, lib_name)):
-                    ffi_lib = lib_name
-                    break
-
-        if ffi_inc and ffi_lib:
-            ext.include_dirs.extend(ffi_inc)
-            ext.libraries.append(ffi_lib)
-            self.use_system_libffi = True
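-
-        # To summarize the probe above: --with-system-ffi only takes effect
-        # when an ffi.h guarded by LIBFFI_H is found and one of the
-        # candidate libraries (ffi_convenience, ffi_pic or ffi) can be
-        # located.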
-
-
-class PyBuildInstall(install):
-    # Suppress the warning about installation into the lib_dynload
-    # directory, which is not in sys.path when running Python during
-    # installation:
-    def initialize_options (self):
-        install.initialize_options(self)
-        self.warn_dir=0
-
-class PyBuildInstallLib(install_lib):
-    # Do exactly what install_lib does, but make sure the correct access
-    # modes get set on installed directories and files.  All installed
-    # files will get mode 644, unless they are a shared library, in which
-    # case they will get mode 755.  All installed directories get mode 755.
-
-    so_ext = sysconfig.get_config_var("SO")
-
-    def install(self):
-        outfiles = install_lib.install(self)
-        self.set_file_modes(outfiles, 0644, 0755)
-        self.set_dir_modes(self.install_dir, 0755)
-        return outfiles
-
-    def set_file_modes(self, files, defaultMode, sharedLibMode):
-        if not self.is_chmod_supported(): return
-        if not files: return
-
-        for filename in files:
-            if os.path.islink(filename): continue
-            mode = defaultMode
-            if filename.endswith(self.so_ext): mode = sharedLibMode
-            log.info("changing mode of %s to %o", filename, mode)
-            if not self.dry_run: os.chmod(filename, mode)
-
-    def set_dir_modes(self, dirname, mode):
-        if not self.is_chmod_supported(): return
-        os.path.walk(dirname, self.set_dir_modes_visitor, mode)
-
-    def set_dir_modes_visitor(self, mode, dirname, names):
-        if os.path.islink(dirname): return
-        log.info("changing mode of %s to %o", dirname, mode)
-        if not self.dry_run: os.chmod(dirname, mode)
-
-    def is_chmod_supported(self):
-        return hasattr(os, 'chmod')
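-
-    # For instance, with the defaults passed in from install() above, a
-    # hypothetical foo.py is installed with mode 0644, foo.so (it ends with
-    # so_ext) with mode 0755, and every directory under install_dir with
-    # mode 0755; symlinks are left untouched.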
-
-SUMMARY = """
-Python is an interpreted, interactive, object-oriented programming
-language. It is often compared to Tcl, Perl, Scheme or Java.
-
-Python combines remarkable power with very clear syntax. It has
-modules, classes, exceptions, very high level dynamic data types, and
-dynamic typing. There are interfaces to many system calls and
-libraries, as well as to various windowing systems (X11, Motif, Tk,
-Mac, MFC). New built-in modules are easily written in C or C++. Python
-is also usable as an extension language for applications that need a
-programmable interface.
-
-The Python implementation is portable: it runs on many brands of UNIX,
-on Windows, DOS, OS/2, Mac, Amiga... If your favorite system isn't
-listed here, it may still be supported, if there's a C compiler for
-it. Ask around on comp.lang.python -- or just try compiling Python
-yourself.
-"""
-
-CLASSIFIERS = """
-Development Status :: 6 - Mature
-License :: OSI Approved :: Python Software Foundation License
-Natural Language :: English
-Programming Language :: C
-Programming Language :: Python
-Topic :: Software Development
-"""
-
-def main():
-    # turn off warnings when deprecated modules are imported
-    import warnings
-    warnings.filterwarnings("ignore",category=DeprecationWarning)
-    setup(# PyPI Metadata (PEP 301)
-          name = "Python",
-          version = sys.version.split()[0],
-          url = "http://www.python.org/%s" % sys.version[:3],
-          maintainer = "Guido van Rossum and the Python community",
-          maintainer_email = "python-dev@python.org",
-          description = "A high-level object-oriented programming language",
-          long_description = SUMMARY.strip(),
-          license = "PSF license",
-          classifiers = filter(None, CLASSIFIERS.split("\n")),
-          platforms = ["Many"],
-
-          # Build info
-          cmdclass = {'build_ext':PyBuildExt, 'install':PyBuildInstall,
-                      'install_lib':PyBuildInstallLib},
-          # The struct module is defined here, because build_ext won't be
-          # called unless there's at least one extension module defined.
-          ext_modules=[Extension('_struct', ['_struct.c'])],
-
-          # Scripts to install
-          scripts = ['Tools/scripts/pydoc', 'Tools/scripts/idle',
-                     'Tools/scripts/2to3',
-                     'Lib/smtpd.py']
-        )
-
-# --install-platlib
-if __name__ == '__main__':
-    main()
diff --git a/pysrc/src/setup-2.7.2.py b/pysrc/src/setup-2.7.2.py
deleted file mode 100644
index 1f9c9b83df1dcfad916f5de89b3a8324a4283c78..0000000000000000000000000000000000000000
--- a/pysrc/src/setup-2.7.2.py
+++ /dev/null
@@ -1,2090 +0,0 @@
-# Autodetecting setup.py script for building the Python extensions
-#
-
-__version__ = "$Revision$"
-
-import sys, os, imp, re, optparse
-from glob import glob
-from platform import machine as platform_machine
-import sysconfig
-
-from distutils import log
-from distutils import text_file
-from distutils.errors import *
-from distutils.core import Extension, setup
-from distutils.command.build_ext import build_ext
-from distutils.command.install import install
-from distutils.command.install_lib import install_lib
-from distutils.spawn import find_executable
-
-# Were we compiled --with-pydebug or with #define Py_DEBUG?
-COMPILED_WITH_PYDEBUG = hasattr(sys, 'gettotalrefcount')
-
-# This global variable is used to hold the list of modules to be disabled.
-disabled_module_list = []
-
-def add_dir_to_list(dirlist, dir):
-    """Add the directory 'dir' to the list 'dirlist' (at the front) if
-    1) 'dir' is not already in 'dirlist'
-    2) 'dir' actually exists, and is a directory."""
-    if dir is not None and os.path.isdir(dir) and dir not in dirlist:
-        dirlist.insert(0, dir)
-
-def macosx_sdk_root():
-    """
-    Return the directory of the current OSX SDK,
-    or '/' if no SDK was specified.
-    """
-    cflags = sysconfig.get_config_var('CFLAGS')
-    m = re.search(r'-isysroot\s+(\S+)', cflags)
-    if m is None:
-        sysroot = '/'
-    else:
-        sysroot = m.group(1)
-    return sysroot
-
-def is_macosx_sdk_path(path):
-    """
-    Returns True if 'path' can be located in an OSX SDK
-    """
-    return (path.startswith('/usr/') and not path.startswith('/usr/local')) or path.startswith('/System/')
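-
-# Hypothetical examples of the predicate above:
-#
-#   >>> is_macosx_sdk_path('/usr/include')
-#   True
-#   >>> is_macosx_sdk_path('/usr/local/include')
-#   False
-#   >>> is_macosx_sdk_path('/opt/local/include')
-#   False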
-
-def find_file(filename, std_dirs, paths):
-    """Searches for the directory where a given file is located,
-    and returns a possibly-empty list of additional directories, or None
-    if the file couldn't be found at all.
-
-    'filename' is the name of a file, such as readline.h or libcrypto.a.
-    'std_dirs' is the list of standard system directories; if the
-        file is found in one of them, no additional directories are needed.
-    'paths' is a list of additional locations to check; if the file is
-        found in one of them, the resulting list will contain the directory.
-    """
-    if sys.platform == 'darwin':
-        # Honor the MacOSX SDK setting when one was specified.
-        # An SDK is a directory with the same structure as a real
-        # system, but with only header files and libraries.
-        sysroot = macosx_sdk_root()
-
-    # Check the standard locations
-    for dir in std_dirs:
-        f = os.path.join(dir, filename)
-
-        if sys.platform == 'darwin' and is_macosx_sdk_path(dir):
-            f = os.path.join(sysroot, dir[1:], filename)
-
-        if os.path.exists(f): return []
-
-    # Check the additional directories
-    for dir in paths:
-        f = os.path.join(dir, filename)
-
-        if sys.platform == 'darwin' and is_macosx_sdk_path(dir):
-            f = os.path.join(sysroot, dir[1:], filename)
-
-        if os.path.exists(f):
-            return [dir]
-
-    # Not found anywhere
-    return None
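-
-# Illustrative only (directory names hypothetical):
-#   find_file('zlib.h', ['/usr/include'], ['/opt/zlib/include'])
-# returns [] when /usr/include/zlib.h exists (no extra -I is needed),
-# ['/opt/zlib/include'] when only the additional path has the header,
-# and None when the file cannot be found at all.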
-
-def find_library_file(compiler, libname, std_dirs, paths):
-    result = compiler.find_library_file(std_dirs + paths, libname)
-    if result is None:
-        return None
-
-    if sys.platform == 'darwin':
-        sysroot = macosx_sdk_root()
-
-    # Check whether the found file is in one of the standard directories
-    dirname = os.path.dirname(result)
-    for p in std_dirs:
-        # Ensure path doesn't end with path separator
-        p = p.rstrip(os.sep)
-
-        if sys.platform == 'darwin' and is_macosx_sdk_path(p):
-            if os.path.join(sysroot, p[1:]) == dirname:
-                return [ ]
-
-        if p == dirname:
-            return [ ]
-
-    # Otherwise, it must have been in one of the additional directories,
-    # so we have to figure out which one.
-    for p in paths:
-        # Ensure path doesn't end with path separator
-        p = p.rstrip(os.sep)
-
-        if sys.platform == 'darwin' and is_macosx_sdk_path(p):
-            if os.path.join(sysroot, p[1:]) == dirname:
-                return [ p ]
-
-        if p == dirname:
-            return [p]
-    else:
-        assert False, "Internal error: Path not found in std_dirs or paths"
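-
-# Illustrative only: the return value mirrors find_file() --
-#   find_library_file(cc, 'ssl', ['/usr/lib'], ['/usr/local/ssl/lib'])
-# yields [] when libssl lives in a standard directory, the one-element
-# list ['/usr/local/ssl/lib'] when it was only found in the additional
-# path (callers pass that on as library_dirs), and None when absent.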
-
-def module_enabled(extlist, modname):
-    """Returns whether the module 'modname' is present in the list
-    of extensions 'extlist'."""
-    extlist = [ext for ext in extlist if ext.name == modname]
-    return len(extlist)
-
-def find_module_file(module, dirlist):
-    """Find a module in a set of possible folders. If it is not found
-    return the unadorned filename"""
-    list = find_file(module, [], dirlist)
-    if not list:
-        return module
-    if len(list) > 1:
-        log.info("WARNING: multiple copies of %s found"%module)
-    return os.path.join(list[0], module)
-
-class PyBuildExt(build_ext):
-
-    def __init__(self, dist):
-        build_ext.__init__(self, dist)
-        self.failed = []
-
-    def build_extensions(self):
-
-        # Detect which modules should be compiled
-        missing = self.detect_modules()
-
-        # Remove modules that are present on the disabled list
-        extensions = [ext for ext in self.extensions
-                      if ext.name not in disabled_module_list]
-        # move ctypes to the end, it depends on other modules
-        ext_map = dict((ext.name, i) for i, ext in enumerate(extensions))
-        if "_ctypes" in ext_map:
-            ctypes = extensions.pop(ext_map["_ctypes"])
-            extensions.append(ctypes)
-        self.extensions = extensions
-
-        # Fix up the autodetected modules, prefixing all the source files
-        # with Modules/ and adding Python's include directory to the path.
-        (srcdir,) = sysconfig.get_config_vars('srcdir')
-        if not srcdir:
-            # Maybe running on Windows but not using CYGWIN?
-            raise ValueError("No source directory; cannot proceed.")
-        srcdir = os.path.abspath(srcdir)
-        moddirlist = [os.path.join(srcdir, 'Modules')]
-
-        # Platform-dependent module source and include directories
-        incdirlist = []
-        platform = self.get_platform()
-        if platform == 'darwin' and ("--disable-toolbox-glue" not in
-            sysconfig.get_config_var("CONFIG_ARGS")):
-            # Mac OS X also includes some mac-specific modules
-            macmoddir = os.path.join(srcdir, 'Mac/Modules')
-            moddirlist.append(macmoddir)
-            incdirlist.append(os.path.join(srcdir, 'Mac/Include'))
-
-        # Fix up the paths for scripts, too
-        self.distribution.scripts = [os.path.join(srcdir, filename)
-                                     for filename in self.distribution.scripts]
-
-        # Python header files
-        headers = [sysconfig.get_config_h_filename()]
-        headers += glob(os.path.join(sysconfig.get_path('platinclude'), "*.h"))
-        for ext in self.extensions[:]:
-            ext.sources = [ find_module_file(filename, moddirlist)
-                            for filename in ext.sources ]
-            if ext.depends is not None:
-                ext.depends = [find_module_file(filename, moddirlist)
-                               for filename in ext.depends]
-            else:
-                ext.depends = []
-            # re-compile extensions if a header file has been changed
-            ext.depends.extend(headers)
-
-            # platform specific include directories
-            ext.include_dirs.extend(incdirlist)
-
-            # If a module has already been built statically,
-            # don't build it here
-            if ext.name in sys.builtin_module_names:
-                self.extensions.remove(ext)
-
-        # Parse Modules/Setup and Modules/Setup.local to figure out which
-        # modules are turned on in the file.
-        remove_modules = []
-        for filename in ('Modules/Setup', 'Modules/Setup.local'):
-            input = text_file.TextFile(filename, join_lines=1)
-            while 1:
-                line = input.readline()
-                if not line: break
-                line = line.split()
-                remove_modules.append(line[0])
-            input.close()
-
-        for ext in self.extensions[:]:
-            if ext.name in remove_modules:
-                self.extensions.remove(ext)
-
-        # When you run "make CC=altcc" or something similar, you really want
-        # those environment variables passed into the setup.py phase.  Here's
-        # a small set of useful ones.
-        compiler = os.environ.get('CC')
-        args = {}
-        # unfortunately, distutils doesn't let us provide separate C and C++
-        # compilers
-        if compiler is not None:
-            (ccshared,cflags) = sysconfig.get_config_vars('CCSHARED','CFLAGS')
-            args['compiler_so'] = compiler + ' ' + ccshared + ' ' + cflags
-        self.compiler.set_executables(**args)
-
-        build_ext.build_extensions(self)
-
-        longest = max([len(e.name) for e in self.extensions])
-        if self.failed:
-            longest = max(longest, max([len(name) for name in self.failed]))
-
-        def print_three_column(lst):
-            lst.sort(key=str.lower)
-            # guarantee zip() doesn't drop anything
-            while len(lst) % 3:
-                lst.append("")
-            for e, f, g in zip(lst[::3], lst[1::3], lst[2::3]):
-                print "%-*s   %-*s   %-*s" % (longest, e, longest, f,
-                                              longest, g)
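-
-        # Illustrative only: the padding makes len(lst) a multiple of 3 so
-        # that zip() keeps every entry; e.g. ['a', 'b', 'c', 'd'] pads to
-        # six items and prints as the rows ('a', 'b', 'c') and ('d', '', '').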
-
-        if missing:
-            print
-            print ("Python build finished, but the necessary bits to build "
-                   "these modules were not found:")
-            print_three_column(missing)
-            print ("To find the necessary bits, look in setup.py in"
-                   " detect_modules() for the module's name.")
-            print
-
-        if self.failed:
-            failed = self.failed[:]
-            print
-            print "Failed to build these modules:"
-            print_three_column(failed)
-            print
-
-    def build_extension(self, ext):
-
-        if ext.name == '_ctypes':
-            if not self.configure_ctypes(ext):
-                return
-
-        try:
-            build_ext.build_extension(self, ext)
-        except (CCompilerError, DistutilsError), why:
-            self.announce('WARNING: building of extension "%s" failed: %s' %
-                          (ext.name, sys.exc_info()[1]))
-            self.failed.append(ext.name)
-            return
-        # Workaround for Mac OS X: The Carbon-based modules cannot be
-        # reliably imported into a command-line Python
-        if 'Carbon' in ext.extra_link_args:
-            self.announce(
-                'WARNING: skipping import check for Carbon-based "%s"' %
-                ext.name)
-            return
-
-        if self.get_platform() == 'darwin' and (
-                sys.maxint > 2**32 and '-arch' in ext.extra_link_args):
-            # Don't bother doing an import check when an extension was
-            # built with an explicit '-arch' flag on OSX. That's currently
-            # only used to build 32-bit only extensions in a 4-way
-            # universal build and loading 32-bit code into a 64-bit
-            # process will fail.
-            self.announce(
-                'WARNING: skipping import check for "%s"' %
-                ext.name)
-            return
-
-        # Workaround for Cygwin: Cygwin currently has fork issues when many
-        # modules have been imported
-        if self.get_platform() == 'cygwin':
-            self.announce('WARNING: skipping import check for Cygwin-based "%s"'
-                % ext.name)
-            return
-        ext_filename = os.path.join(
-            self.build_lib,
-            self.get_ext_filename(self.get_ext_fullname(ext.name)))
-        try:
-            imp.load_dynamic(ext.name, ext_filename)
-        except ImportError, why:
-            self.failed.append(ext.name)
-            self.announce('*** WARNING: renaming "%s" since importing it'
-                          ' failed: %s' % (ext.name, why), level=3)
-            assert not self.inplace
-            basename, tail = os.path.splitext(ext_filename)
-            newname = basename + "_failed" + tail
-            if os.path.exists(newname):
-                os.remove(newname)
-            os.rename(ext_filename, newname)
-
-            # XXX -- This relies on a Vile HACK in
-            # distutils.command.build_ext.build_extension().  The
-            # _built_objects attribute is stored there strictly for
-            # use here.
-            # If there is a failure, _built_objects may not be there,
-            # so catch the AttributeError and move on.
-            try:
-                for filename in self._built_objects:
-                    os.remove(filename)
-            except AttributeError:
-                self.announce('unable to remove files (ignored)')
-        except:
-            exc_type, why, tb = sys.exc_info()
-            self.announce('*** WARNING: importing extension "%s" '
-                          'failed with %s: %s' % (ext.name, exc_type, why),
-                          level=3)
-            self.failed.append(ext.name)
-
-    def get_platform(self):
-        # Get value of sys.platform
-        for platform in ['cygwin', 'beos', 'darwin', 'atheos', 'osf1']:
-            if sys.platform.startswith(platform):
-                return platform
-        return sys.platform
-
-    def add_multiarch_paths(self):
-        # Debian/Ubuntu multiarch support.
-        # https://wiki.ubuntu.com/MultiarchSpec
-        if not find_executable('dpkg-architecture'):
-            return
-        tmpfile = os.path.join(self.build_temp, 'multiarch')
-        if not os.path.exists(self.build_temp):
-            os.makedirs(self.build_temp)
-        ret = os.system(
-            'dpkg-architecture -qDEB_HOST_MULTIARCH > %s 2> /dev/null' %
-            tmpfile)
-        try:
-            if ret >> 8 == 0:
-                with open(tmpfile) as fp:
-                    multiarch_path_component = fp.readline().strip()
-                add_dir_to_list(self.compiler.library_dirs,
-                                '/usr/lib/' + multiarch_path_component)
-                add_dir_to_list(self.compiler.include_dirs,
-                                '/usr/include/' + multiarch_path_component)
-        finally:
-            os.unlink(tmpfile)
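-
-    # Illustrative only: on a Debian/Ubuntu amd64 system dpkg-architecture
-    # prints a multiarch tuple such as "x86_64-linux-gnu", so this method
-    # adds /usr/lib/x86_64-linux-gnu and /usr/include/x86_64-linux-gnu to
-    # the compiler's search paths; other architectures print other tuples.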
-
-    def detect_modules(self):
-        # PCMDI Change
-        # Ensure that place we put tcl/tk/netcdf etc. is always used
-        libbase = os.environ.get('EXTERNALS', os.path.join(sys.prefix,'..','Externals'))
-        mylibdir = os.path.join(libbase,'lib')
-        myincdir = os.path.join(libbase,'include')
-        add_dir_to_list(self.compiler.library_dirs, mylibdir)
-        add_dir_to_list(self.compiler.include_dirs, myincdir)
-        # end PCMDI change
-
-        # Ensure that /usr/local is always used
-        add_dir_to_list(self.compiler.library_dirs, '/usr/local/lib')
-        add_dir_to_list(self.compiler.include_dirs, '/usr/local/include')
-        self.add_multiarch_paths()
-
-        # Add paths specified in the environment variables LDFLAGS and
-        # CPPFLAGS for header and library files.
-        # We must get the values from the Makefile and not the environment
-        # directly since an inconsistently reproducible issue comes up where
-        # the environment variable is not set even though the values were passed
-        # into configure and stored in the Makefile (issue found on OS X 10.3).
-        for env_var, arg_name, dir_list in (
-                ('LDFLAGS', '-R', self.compiler.runtime_library_dirs),
-                ('LDFLAGS', '-L', self.compiler.library_dirs),
-                ('CPPFLAGS', '-I', self.compiler.include_dirs)):
-            env_val = sysconfig.get_config_var(env_var)
-            if env_val:
-                # To prevent optparse from raising an exception about any
-                # options in env_val that it doesn't know about we strip out
-                # all double dashes and any dashes followed by a character
-                # that is not for the option we are dealing with.
-                #
-                # Please note that order of the regex is important!  We must
-                # strip out double-dashes first so that we don't end up
-                # substituting "--Long" with "-Long" and thus have "ong"
-                # treated as a library directory.
-                env_val = re.sub(r'(^|\s+)-(-|(?!%s))' % arg_name[1],
-                                 ' ', env_val)
-                parser = optparse.OptionParser()
-                # Make sure that allowing args interspersed with options is
-                # allowed
-                parser.allow_interspersed_args = True
-                parser.error = lambda msg: None
-                parser.add_option(arg_name, dest="dirs", action="append")
-                options = parser.parse_args(env_val.split())[0]
-                if options.dirs:
-                    for directory in reversed(options.dirs):
-                        add_dir_to_list(dir_list, directory)
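-
-        # Illustrative only: with LDFLAGS="-L/opt/foo/lib -L/opt/bar/lib"
-        # (hypothetical paths), the ('LDFLAGS', '-L', ...) pass parses
-        # options.dirs == ['/opt/foo/lib', '/opt/bar/lib']; iterating in
-        # reverse preserves the left-to-right search order, since each
-        # directory is inserted at the front of the compiler's list.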
-
-        if os.path.normpath(sys.prefix) != '/usr' \
-                and not sysconfig.get_config_var('PYTHONFRAMEWORK'):
-            # OSX note: Don't add LIBDIR and INCLUDEDIR when building a framework
-            # (PYTHONFRAMEWORK is set), to avoid linking problems when
-            # building a framework with different architectures than
-            # the one that is currently installed (issue #7473)
-            add_dir_to_list(self.compiler.library_dirs,
-                            sysconfig.get_config_var("LIBDIR"))
-            add_dir_to_list(self.compiler.include_dirs,
-                            sysconfig.get_config_var("INCLUDEDIR"))
-
-        try:
-            have_unicode = unicode
-        except NameError:
-            have_unicode = 0
-
-        # lib_dirs and inc_dirs are used to search for files;
-        # if a file is found in one of those directories, it can
-        # be assumed that no additional -I,-L directives are needed.
-        lib_dirs = self.compiler.library_dirs + [
-            '/lib64', '/usr/lib64',
-            '/lib', '/usr/lib', '/usr/lib/x86_64-linux-gnu',
-            ]
-        inc_dirs = self.compiler.include_dirs + ['/usr/include']
-        exts = []
-        missing = []
-
-        config_h = sysconfig.get_config_h_filename()
-        config_h_vars = sysconfig.parse_config_h(open(config_h))
-
-        platform = self.get_platform()
-        srcdir = sysconfig.get_config_var('srcdir')
-
-        # Check for AtheOS which has libraries in non-standard locations
-        if platform == 'atheos':
-            lib_dirs += ['/system/libs', '/atheos/autolnk/lib']
-            lib_dirs += os.getenv('LIBRARY_PATH', '').split(os.pathsep)
-            inc_dirs += ['/system/include', '/atheos/autolnk/include']
-            inc_dirs += os.getenv('C_INCLUDE_PATH', '').split(os.pathsep)
-
-        # OSF/1 and Unixware have some stuff in /usr/ccs/lib (like -ldb)
-        if platform in ['osf1', 'unixware7', 'openunix8']:
-            lib_dirs += ['/usr/ccs/lib']
-
-        if platform == 'darwin':
-            # This should work on any unixy platform ;-)
-            # If the user has bothered specifying additional -I and -L flags
-            # in OPT and LDFLAGS we might as well use them here.
-            #   NOTE: using shlex.split would technically be more correct, but
-            # also gives a bootstrap problem. Let's hope nobody uses directories
-            # with whitespace in the name to store libraries.
-            cflags, ldflags = sysconfig.get_config_vars(
-                    'CFLAGS', 'LDFLAGS')
-            for item in cflags.split():
-                if item.startswith('-I'):
-                    inc_dirs.append(item[2:])
-
-            for item in ldflags.split():
-                if item.startswith('-L'):
-                    lib_dirs.append(item[2:])
-
-        # Check for MacOS X, which doesn't need libm.a at all
-        math_libs = ['m']
-        if platform in ['darwin', 'beos']:
-            math_libs = []
-
-        # XXX Omitted modules: gl, pure, dl, SGI-specific modules
-
-        #
-        # The following modules are all pretty straightforward, and compile
-        # on pretty much any POSIXish platform.
-        #
-
-        # Some modules that are normally always on:
-        #exts.append( Extension('_weakref', ['_weakref.c']) )
-
-        # array objects
-        exts.append( Extension('array', ['arraymodule.c']) )
-        # complex math library functions
-        exts.append( Extension('cmath', ['cmathmodule.c', '_math.c'],
-                               depends=['_math.h'],
-                               libraries=math_libs) )
-        # math library functions, e.g. sin()
-        exts.append( Extension('math',  ['mathmodule.c', '_math.c'],
-                               depends=['_math.h'],
-                               libraries=math_libs) )
-        # fast string operations implemented in C
-        exts.append( Extension('strop', ['stropmodule.c']) )
-        # time operations and variables
-        exts.append( Extension('time', ['timemodule.c'],
-                               libraries=math_libs) )
-        exts.append( Extension('datetime', ['datetimemodule.c', 'timemodule.c'],
-                               libraries=math_libs) )
-        # fast iterator tools implemented in C
-        exts.append( Extension("itertools", ["itertoolsmodule.c"]) )
-        # code that will be builtins in the future, but conflict with the
-        #  current builtins
-        exts.append( Extension('future_builtins', ['future_builtins.c']) )
-        # random number generator implemented in C
-        exts.append( Extension("_random", ["_randommodule.c"]) )
-        # high-performance collections
-        exts.append( Extension("_collections", ["_collectionsmodule.c"]) )
-        # bisect
-        exts.append( Extension("_bisect", ["_bisectmodule.c"]) )
-        # heapq
-        exts.append( Extension("_heapq", ["_heapqmodule.c"]) )
-        # operator.add() and similar goodies
-        exts.append( Extension('operator', ['operator.c']) )
-        # Python 3.1 _io library
-        exts.append( Extension("_io",
-            ["_io/bufferedio.c", "_io/bytesio.c", "_io/fileio.c",
-             "_io/iobase.c", "_io/_iomodule.c", "_io/stringio.c", "_io/textio.c"],
-             depends=["_io/_iomodule.h"], include_dirs=["Modules/_io"]))
-        # _functools
-        exts.append( Extension("_functools", ["_functoolsmodule.c"]) )
-        # _json speedups
-        exts.append( Extension("_json", ["_json.c"]) )
-        # Python C API test module
-        exts.append( Extension('_testcapi', ['_testcapimodule.c'],
-                               depends=['testcapi_long.h']) )
-        # profilers (_lsprof is for cProfile.py)
-        exts.append( Extension('_hotshot', ['_hotshot.c']) )
-        exts.append( Extension('_lsprof', ['_lsprof.c', 'rotatingtree.c']) )
-        # static Unicode character database
-        if have_unicode:
-            exts.append( Extension('unicodedata', ['unicodedata.c']) )
-        else:
-            missing.append('unicodedata')
-        # access to ISO C locale support
-        data = open('pyconfig.h').read()
-        m = re.search(r"#s*define\s+WITH_LIBINTL\s+1\s*", data)
-        if m is not None:
-            locale_libs = ['intl']
-        else:
-            locale_libs = []
-        if platform == 'darwin':
-            locale_extra_link_args = ['-framework', 'CoreFoundation']
-        else:
-            locale_extra_link_args = []
-
-
-        exts.append( Extension('_locale', ['_localemodule.c'],
-                               libraries=locale_libs,
-                               extra_link_args=locale_extra_link_args) )
-
-        # Modules with some UNIX dependencies -- on by default:
-        # (If you have a really backward UNIX, select and socket may not be
-        # supported...)
-
-        # fcntl(2) and ioctl(2)
-        libs = []
-        if (config_h_vars.get('FLOCK_NEEDS_LIBBSD', False)):
-            # May be necessary on AIX for flock function
-            libs = ['bsd']
-        exts.append( Extension('fcntl', ['fcntlmodule.c'], libraries=libs) )
-        # pwd(3)
-        exts.append( Extension('pwd', ['pwdmodule.c']) )
-        # grp(3)
-        exts.append( Extension('grp', ['grpmodule.c']) )
-        # spwd, shadow passwords
-        if (config_h_vars.get('HAVE_GETSPNAM', False) or
-                config_h_vars.get('HAVE_GETSPENT', False)):
-            exts.append( Extension('spwd', ['spwdmodule.c']) )
-        else:
-            missing.append('spwd')
-
-        # select(2); not on ancient System V
-        exts.append( Extension('select', ['selectmodule.c']) )
-
-        # Fred Drake's interface to the Python parser
-        exts.append( Extension('parser', ['parsermodule.c']) )
-
-        # cStringIO and cPickle
-        exts.append( Extension('cStringIO', ['cStringIO.c']) )
-        exts.append( Extension('cPickle', ['cPickle.c']) )
-
-        # Memory-mapped files (also works on Win32).
-        if platform not in ['atheos']:
-            exts.append( Extension('mmap', ['mmapmodule.c']) )
-        else:
-            missing.append('mmap')
-
-        # Lance Ellinghaus's syslog module
-        # syslog daemon interface
-        exts.append( Extension('syslog', ['syslogmodule.c']) )
-
-        # George Neville-Neil's timing module:
-        # Deprecated in PEP 4 http://www.python.org/peps/pep-0004.html
-        # http://mail.python.org/pipermail/python-dev/2006-January/060023.html
-        #exts.append( Extension('timing', ['timingmodule.c']) )
-
-        #
-        # Here ends the simple stuff.  From here on, modules need certain
-        # libraries, are platform-specific, or present other surprises.
-        #
-
-        # Multimedia modules
-        # These don't work for 64-bit platforms!!!
-        # These represent audio samples or images as strings:
-
-        # Operations on audio samples
-        # According to #993173, this one should actually work fine on
-        # 64-bit platforms.
-        exts.append( Extension('audioop', ['audioop.c']) )
-
-        # Disabled on 64-bit platforms
-        if sys.maxint != 9223372036854775807L:
-            # Operations on images
-            exts.append( Extension('imageop', ['imageop.c']) )
-        else:
-            missing.extend(['imageop'])
-
-        # readline
-        do_readline = self.compiler.find_library_file(lib_dirs, 'readline')
-        readline_termcap_library = ""
-        curses_library = ""
-        # Determine if readline is already linked against curses or tinfo.
-        if do_readline and find_executable('ldd'):
-            fp = os.popen("ldd %s" % do_readline)
-            ldd_output = fp.readlines()
-            ret = fp.close()
-            if ret is None or ret >> 8 == 0:
-                for ln in ldd_output:
-                    if 'curses' in ln:
-                        readline_termcap_library = re.sub(
-                            r'.*lib(n?cursesw?)\.so.*', r'\1', ln
-                        ).rstrip()
-                        break
-                    if 'tinfo' in ln: # termcap interface split out from ncurses
-                        readline_termcap_library = 'tinfo'
-                        break
-        # Issue 7384: If readline is already linked against curses,
-        # use the same library for the readline and curses modules.
-        if 'curses' in readline_termcap_library:
-            curses_library = readline_termcap_library
-        elif self.compiler.find_library_file(lib_dirs, 'ncursesw'):
-            curses_library = 'ncursesw'
-        elif self.compiler.find_library_file(lib_dirs, 'ncurses'):
-            curses_library = 'ncurses'
-        elif self.compiler.find_library_file(lib_dirs, 'curses'):
-            curses_library = 'curses'
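-
-        # Illustrative only: a hypothetical ldd line such as
-        #   "libncursesw.so.5 => /lib/libncursesw.so.5 (0x...)"
-        # is reduced by re.sub(r'.*lib(n?cursesw?)\.so.*', r'\1', ln) to
-        # 'ncursesw', so readline and the _curses module end up linking
-        # against the same termcap provider.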
-
-        if platform == 'darwin':
-            os_release = int(os.uname()[2].split('.')[0])
-            dep_target = sysconfig.get_config_var('MACOSX_DEPLOYMENT_TARGET')
-            if dep_target and dep_target.split('.') < ['10', '5']:
-                os_release = 8
-            if os_release < 9:
-                # MacOSX 10.4 has a broken readline. Don't try to build
-                # the readline module unless the user has installed a fixed
-                # readline package
-                if find_file('readline/rlconf.h', inc_dirs, []) is None:
-                    do_readline = False
-        if do_readline:
-            if platform == 'darwin' and os_release < 9:
-                # In every directory on the search path search for a dynamic
-                # library and then a static library, instead of first looking
-                # for dynamic libraries on the entire path.
-                # This way a statically linked custom readline gets picked up
-                # before the (possibly broken) dynamic library in /usr/lib.
-                readline_extra_link_args = ('-Wl,-search_paths_first',)
-            else:
-                readline_extra_link_args = ()
-
-            readline_libs = ['readline']
-            if readline_termcap_library:
-                pass # Issue 7384: Already linked against curses or tinfo.
-            elif curses_library:
-                readline_libs.append(curses_library)
-            elif self.compiler.find_library_file(lib_dirs +
-                                                     ['/usr/lib/termcap'],
-                                                     'termcap'):
-                readline_libs.append('termcap')
-            exts.append( Extension('readline', ['readline.c'],
-                                   library_dirs=['/usr/lib/termcap'],
-                                   extra_link_args=readline_extra_link_args,
-                                   libraries=readline_libs) )
-        else:
-            missing.append('readline')
-
-        # crypt module.
-
-        if self.compiler.find_library_file(lib_dirs, 'crypt'):
-            libs = ['crypt']
-        else:
-            libs = []
-        exts.append( Extension('crypt', ['cryptmodule.c'], libraries=libs) )
-
-        # CSV files
-        exts.append( Extension('_csv', ['_csv.c']) )
-
-        # socket(2)
-        exts.append( Extension('_socket', ['socketmodule.c'],
-                               depends = ['socketmodule.h']) )
-        # Detect SSL support for the socket module (via _ssl)
-        search_for_ssl_incs_in = [
-                              '/usr/local/ssl/include',
-                              '/usr/contrib/ssl/include/'
-                             ]
-        ssl_incs = find_file('openssl/ssl.h', inc_dirs,
-                             search_for_ssl_incs_in
-                             )
-        if ssl_incs is not None:
-            krb5_h = find_file('krb5.h', inc_dirs,
-                               ['/usr/kerberos/include'])
-            if krb5_h:
-                ssl_incs += krb5_h
-        ssl_libs = find_library_file(self.compiler, 'ssl',lib_dirs,
-                                     ['/usr/local/ssl/lib',
-                                      '/usr/contrib/ssl/lib/'
-                                     ] )
-
-        if (ssl_incs is not None and
-            ssl_libs is not None):
-            exts.append( Extension('_ssl', ['_ssl.c'],
-                                   include_dirs = ssl_incs,
-                                   library_dirs = ssl_libs,
-                                   libraries = ['ssl', 'crypto'],
-                                   depends = ['socketmodule.h']), )
-        else:
-            missing.append('_ssl')
-
-        # find out which version of OpenSSL we have
-        openssl_ver = 0
-        openssl_ver_re = re.compile(
-            '^\s*#\s*define\s+OPENSSL_VERSION_NUMBER\s+(0x[0-9a-fA-F]+)' )
-
-        # look for the openssl version header on the compiler search path.
-        opensslv_h = find_file('openssl/opensslv.h', [],
-                inc_dirs + search_for_ssl_incs_in)
-        if opensslv_h:
-            name = os.path.join(opensslv_h[0], 'openssl/opensslv.h')
-            if sys.platform == 'darwin' and is_macosx_sdk_path(name):
-                name = os.path.join(macosx_sdk_root(), name[1:])
-            try:
-                incfile = open(name, 'r')
-                for line in incfile:
-                    m = openssl_ver_re.match(line)
-                    if m:
-                        openssl_ver = eval(m.group(1))
-            except IOError, msg:
-                print "IOError while reading opensshv.h:", msg
-                pass
-
-        min_openssl_ver = 0x00907000
-        have_any_openssl = ssl_incs is not None and ssl_libs is not None
-        have_usable_openssl = (have_any_openssl and
-                               openssl_ver >= min_openssl_ver)
-
-        if have_any_openssl:
-            if have_usable_openssl:
-                # The _hashlib module wraps optimized implementations
-                # of hash functions from the OpenSSL library.
-                exts.append( Extension('_hashlib', ['_hashopenssl.c'],
-                                       include_dirs = ssl_incs,
-                                       library_dirs = ssl_libs,
-                                       libraries = ['ssl', 'crypto']) )
-            else:
-                print ("warning: openssl 0x%08x is too old for _hashlib" %
-                       openssl_ver)
-                missing.append('_hashlib')
-        if COMPILED_WITH_PYDEBUG or not have_usable_openssl:
-            # The _sha module implements the SHA1 hash algorithm.
-            exts.append( Extension('_sha', ['shamodule.c']) )
-            # The _md5 module implements the RSA Data Security, Inc. MD5
-            # Message-Digest Algorithm, described in RFC 1321.  The
-            # necessary files md5.c and md5.h are included here.
-            exts.append( Extension('_md5',
-                            sources = ['md5module.c', 'md5.c'],
-                            depends = ['md5.h']) )
-
-        min_sha2_openssl_ver = 0x00908000
-        if COMPILED_WITH_PYDEBUG or openssl_ver < min_sha2_openssl_ver:
-            # OpenSSL doesn't do these until 0.9.8 so we'll bring our own hash
-            exts.append( Extension('_sha256', ['sha256module.c']) )
-            exts.append( Extension('_sha512', ['sha512module.c']) )
-
-        # Modules that provide persistent dictionary-like semantics.  You will
-        # probably want to arrange for at least one of them to be available on
-        # your machine, though none are defined by default because of library
-        # dependencies.  The Python module anydbm.py provides an
-        # implementation independent wrapper for these; dumbdbm.py provides
-        # similar functionality (but slower of course) implemented in Python.
-
-        # Sleepycat^WOracle Berkeley DB interface.
-        #  http://www.oracle.com/database/berkeley-db/db/index.html
-        #
-        # This requires the Sleepycat^WOracle DB code. The supported versions
-        # are set below.  Visit the URL above to download
-        # a release.  Most open source OSes come with one or more
-        # versions of BerkeleyDB already installed.
-
-        max_db_ver = (4, 8)
-        min_db_ver = (4, 1)
-        db_setup_debug = False   # verbose debug prints from this script?
-
-        def allow_db_ver(db_ver):
-            """Returns a boolean if the given BerkeleyDB version is acceptable.
-
-            Args:
-              db_ver: A tuple of the version to verify.
-            """
-            if not (min_db_ver <= db_ver <= max_db_ver):
-                return False
-            # Use this function to filter out known bad configurations.
-            if (4, 6) == db_ver[:2]:
-                # BerkeleyDB 4.6.x is not stable on many architectures.
-                arch = platform_machine()
-                if arch not in ('i386', 'i486', 'i586', 'i686',
-                                'x86_64', 'ia64'):
-                    return False
-            return True
-
-        def gen_db_minor_ver_nums(major):
-            if major == 4:
-                for x in range(max_db_ver[1]+1):
-                    if allow_db_ver((4, x)):
-                        yield x
-            elif major == 3:
-                for x in (3,):
-                    if allow_db_ver((3, x)):
-                        yield x
-            else:
-                raise ValueError("unknown major BerkeleyDB version", major)
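-
-        # Illustrative only: with min_db_ver == (4, 1) and max_db_ver ==
-        # (4, 8), list(gen_db_minor_ver_nums(4)) is [1, 2, ..., 8] on
-        # x86-family and ia64 machines; on other architectures
-        # allow_db_ver() also drops 6, since BerkeleyDB 4.6.x is known
-        # to be unstable there.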
-
-        # construct a list of paths to look for the header file in on
-        # top of the normal inc_dirs.
-        db_inc_paths = [
-            '/usr/include/db4',
-            '/usr/local/include/db4',
-            '/opt/sfw/include/db4',
-            '/usr/include/db3',
-            '/usr/local/include/db3',
-            '/opt/sfw/include/db3',
-            # Fink defaults (http://fink.sourceforge.net/)
-            '/sw/include/db4',
-            '/sw/include/db3',
-        ]
-        # 4.x minor number specific paths
-        for x in gen_db_minor_ver_nums(4):
-            db_inc_paths.append('/usr/include/db4%d' % x)
-            db_inc_paths.append('/usr/include/db4.%d' % x)
-            db_inc_paths.append('/usr/local/BerkeleyDB.4.%d/include' % x)
-            db_inc_paths.append('/usr/local/include/db4%d' % x)
-            db_inc_paths.append('/pkg/db-4.%d/include' % x)
-            db_inc_paths.append('/opt/db-4.%d/include' % x)
-            # MacPorts default (http://www.macports.org/)
-            db_inc_paths.append('/opt/local/include/db4%d' % x)
-        # 3.x minor number specific paths
-        for x in gen_db_minor_ver_nums(3):
-            db_inc_paths.append('/usr/include/db3%d' % x)
-            db_inc_paths.append('/usr/local/BerkeleyDB.3.%d/include' % x)
-            db_inc_paths.append('/usr/local/include/db3%d' % x)
-            db_inc_paths.append('/pkg/db-3.%d/include' % x)
-            db_inc_paths.append('/opt/db-3.%d/include' % x)
-
-        # Add some common subdirectories for Sleepycat DB to the list,
-        # based on the standard include directories. This way DB3/4 gets
-        # picked up when it is installed in a non-standard prefix and
-        # the user has added that prefix into inc_dirs.
-        std_variants = []
-        for dn in inc_dirs:
-            std_variants.append(os.path.join(dn, 'db3'))
-            std_variants.append(os.path.join(dn, 'db4'))
-            for x in gen_db_minor_ver_nums(4):
-                std_variants.append(os.path.join(dn, "db4%d"%x))
-                std_variants.append(os.path.join(dn, "db4.%d"%x))
-            for x in gen_db_minor_ver_nums(3):
-                std_variants.append(os.path.join(dn, "db3%d"%x))
-                std_variants.append(os.path.join(dn, "db3.%d"%x))
-
-        db_inc_paths = std_variants + db_inc_paths
-        db_inc_paths = [p for p in db_inc_paths if os.path.exists(p)]
-
-        db_ver_inc_map = {}
-
-        if sys.platform == 'darwin':
-            sysroot = macosx_sdk_root()
-
-        class db_found(Exception): pass
-        try:
-            # See whether there is a Sleepycat header in the standard
-            # search path.
-            for d in inc_dirs + db_inc_paths:
-                f = os.path.join(d, "db.h")
-
-                if sys.platform == 'darwin' and is_macosx_sdk_path(d):
-                    f = os.path.join(sysroot, d[1:], "db.h")
-
-                if db_setup_debug: print "db: looking for db.h in", f
-                if os.path.exists(f):
-                    f = open(f).read()
-                    m = re.search(r"#define\WDB_VERSION_MAJOR\W(\d+)", f)
-                    if m:
-                        db_major = int(m.group(1))
-                        m = re.search(r"#define\WDB_VERSION_MINOR\W(\d+)", f)
-                        db_minor = int(m.group(1))
-                        db_ver = (db_major, db_minor)
-
-                        # Avoid 4.6 prior to 4.6.21 due to a BerkeleyDB bug
-                        if db_ver == (4, 6):
-                            m = re.search(r"#define\WDB_VERSION_PATCH\W(\d+)", f)
-                            db_patch = int(m.group(1))
-                            if db_patch < 21:
-                                print "db.h:", db_ver, "patch", db_patch,
-                                print "being ignored (4.6.x must be >= 4.6.21)"
-                                continue
-
-                        if ( (db_ver not in db_ver_inc_map) and
-                            allow_db_ver(db_ver) ):
-                            # save the include directory with the db.h version
-                            # (first occurrence only)
-                            db_ver_inc_map[db_ver] = d
-                            if db_setup_debug:
-                                print "db.h: found", db_ver, "in", d
-                        else:
-                            # we already found a header for this library version
-                            if db_setup_debug: print "db.h: ignoring", d
-                    else:
-                        # ignore this header, it didn't contain a version number
-                        if db_setup_debug:
-                            print "db.h: no version number version in", d
-
-            db_found_vers = db_ver_inc_map.keys()
-            db_found_vers.sort()
-
-            while db_found_vers:
-                db_ver = db_found_vers.pop()
-                db_incdir = db_ver_inc_map[db_ver]
-
-                # check lib directories parallel to the location of the header
-                db_dirs_to_check = [
-                    db_incdir.replace("include", 'lib64'),
-                    db_incdir.replace("include", 'lib'),
-                    db_incdir.replace("include", 'lib/x86_64-linux-gnu')
-                ]
-
-                if sys.platform != 'darwin':
-                    db_dirs_to_check = filter(os.path.isdir, db_dirs_to_check)
-
-                else:
-                    # Same as other branch, but takes OSX SDK into account
-                    tmp = []
-                    for dn in db_dirs_to_check:
-                        if is_macosx_sdk_path(dn):
-                            if os.path.isdir(os.path.join(sysroot, dn[1:])):
-                                tmp.append(dn)
-                        else:
-                            if os.path.isdir(dn):
-                                tmp.append(dn)
-                    db_dirs_to_check = tmp
-
-                # Look for a version specific db-X.Y before an ambiguous dbX
-                # XXX should we -ever- look for a dbX name?  Do any
-                # systems really not name their library by version and
-                # symlink to more general names?
-                for dblib in (('db-%d.%d' % db_ver),
-                              ('db%d%d' % db_ver),
-                              ('db%d' % db_ver[0])):
-                    dblib_file = self.compiler.find_library_file(
-                                    db_dirs_to_check + lib_dirs, dblib )
-                    if dblib_file:
-                        dblib_dir = [ os.path.abspath(os.path.dirname(dblib_file)) ]
-                        raise db_found
-                    else:
-                        if db_setup_debug: print "db lib: ", dblib, "not found"
-
-        except db_found:
-            if db_setup_debug:
-                print "bsddb using BerkeleyDB lib:", db_ver, dblib
-                print "bsddb lib dir:", dblib_dir, " inc dir:", db_incdir
-            db_incs = [db_incdir]
-            dblibs = [dblib]
-            # We add the runtime_library_dirs argument because the
-            # BerkeleyDB lib we're linking against often isn't in the
-            # system dynamic library search path.  This is usually
-            # correct and most trouble free, but may cause problems in
-            # some unusual system configurations (e.g. the directory
-            # is on an NFS server that goes away).
-            exts.append(Extension('_bsddb', ['_bsddb.c'],
-                                  depends = ['bsddb.h'],
-                                  library_dirs=dblib_dir,
-                                  runtime_library_dirs=dblib_dir,
-                                  include_dirs=db_incs,
-                                  libraries=dblibs))
-        else:
-            if db_setup_debug: print "db: no appropriate library found"
-            db_incs = None
-            dblibs = []
-            dblib_dir = None
-            missing.append('_bsddb')
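-
-        # A note on the control flow above: raising the local db_found
-        # exception acts as a labelled break out of the doubly nested
-        # version/library-name loops; the except clause then builds the
-        # extension with db_ver, dblib and dblib_dir still bound to the
-        # match, while the try statement's else clause handles the
-        # not-found case.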
-
-        # The sqlite interface
-        sqlite_setup_debug = False   # verbose debug prints from this script?
-
-        # We hunt for #define SQLITE_VERSION "n.n.n"
-        # We need to find >= sqlite version 3.0.8
-        sqlite_incdir = sqlite_libdir = None
-        sqlite_inc_paths = [ '/usr/include',
-                             '/usr/include/sqlite',
-                             '/usr/include/sqlite3',
-                             '/usr/local/include',
-                             '/usr/local/include/sqlite',
-                             '/usr/local/include/sqlite3',
-                           ]
-        MIN_SQLITE_VERSION_NUMBER = (3, 0, 8)
-        MIN_SQLITE_VERSION = ".".join([str(x)
-                                    for x in MIN_SQLITE_VERSION_NUMBER])
-
-        # Scan the default include directories before the SQLite specific
-        # ones. This allows one to override the copy of sqlite on OSX,
-        # where /usr/include contains an old version of sqlite.
-        if sys.platform == 'darwin':
-            sysroot = macosx_sdk_root()
-
-        for d in inc_dirs + sqlite_inc_paths:
-            f = os.path.join(d, "sqlite3.h")
-
-            if sys.platform == 'darwin' and is_macosx_sdk_path(d):
-                f = os.path.join(sysroot, d[1:], "sqlite3.h")
-
-            if os.path.exists(f):
-                if sqlite_setup_debug: print "sqlite: found %s"%f
-                incf = open(f).read()
-                m = re.search(
-                    r'\s*.*#\s*.*define\s.*SQLITE_VERSION\W*"(.*)"', incf)
-                if m:
-                    sqlite_version = m.group(1)
-                    sqlite_version_tuple = tuple([int(x)
-                                        for x in sqlite_version.split(".")])
-                    if sqlite_version_tuple >= MIN_SQLITE_VERSION_NUMBER:
-                        # we win!
-                        if sqlite_setup_debug:
-                            print "%s/sqlite3.h: version %s"%(d, sqlite_version)
-                        sqlite_incdir = d
-                        break
-                    else:
-                        if sqlite_setup_debug:
-                            print "%s: version %d is too old, need >= %s"%(d,
-                                        sqlite_version, MIN_SQLITE_VERSION)
-                elif sqlite_setup_debug:
-                    print "sqlite: %s had no SQLITE_VERSION"%(f,)
-
-        if sqlite_incdir:
-            sqlite_dirs_to_check = [
-                os.path.join(sqlite_incdir, '..', 'lib64'),
-                os.path.join(sqlite_incdir, '..', 'lib'),
-                os.path.join(sqlite_incdir, '..', 'lib/x86_64-linux-gnu'),
-                os.path.join(sqlite_incdir, '..', '..', 'lib64'),
-                os.path.join(sqlite_incdir, '..', '..', 'lib'),
-            ]
-            sqlite_libfile = self.compiler.find_library_file(
-                                sqlite_dirs_to_check + lib_dirs, 'sqlite3')
-            if sqlite_libfile:
-                sqlite_libdir = [os.path.abspath(os.path.dirname(sqlite_libfile))]
-
-        if sqlite_incdir and sqlite_libdir:
-            sqlite_srcs = ['_sqlite/cache.c',
-                '_sqlite/connection.c',
-                '_sqlite/cursor.c',
-                '_sqlite/microprotocols.c',
-                '_sqlite/module.c',
-                '_sqlite/prepare_protocol.c',
-                '_sqlite/row.c',
-                '_sqlite/statement.c',
-                '_sqlite/util.c', ]
-
-            sqlite_defines = []
-            if sys.platform != "win32":
-                sqlite_defines.append(('MODULE_NAME', '"sqlite3"'))
-            else:
-                sqlite_defines.append(('MODULE_NAME', '\\"sqlite3\\"'))
-
-            # Comment this out if you want the sqlite3 module to be able to load extensions.
-            sqlite_defines.append(("SQLITE_OMIT_LOAD_EXTENSION", "1"))
-
-            if sys.platform == 'darwin':
-                # In every directory on the search path search for a dynamic
-                # library and then a static library, instead of first looking
-                # for dynamic libraries on the entire path.
-                # This way a statically linked custom sqlite gets picked up
-                # before the dynamic library in /usr/lib.
-                sqlite_extra_link_args = ('-Wl,-search_paths_first',)
-            else:
-                sqlite_extra_link_args = ()
-
-            exts.append(Extension('_sqlite3', sqlite_srcs,
-                                  define_macros=sqlite_defines,
-                                  include_dirs=["Modules/_sqlite",
-                                                sqlite_incdir],
-                                  library_dirs=sqlite_libdir,
-                                  runtime_library_dirs=sqlite_libdir,
-                                  extra_link_args=sqlite_extra_link_args,
-                                  libraries=["sqlite3",]))
-        else:
-            missing.append('_sqlite3')
-
-        # Look for Berkeley db 1.85.   Note that it is built as a different
-        # module name so it can be included even when later versions are
-        # available.  A very restrictive search is performed to avoid
-        # accidentally building this module with a later version of the
-        # underlying db library.  Many BSD-ish Unixes incorporate db 1.85
-        # symbols into libc and place the include file in /usr/include.
-        #
-        # If the better bsddb library can be built (db_incs is defined)
-        # we do not build this one.  Otherwise this build will pick up
-        # the more recent berkeleydb's db.h file first in the include path
-        # when attempting to compile and it will fail.
-        f = "/usr/include/db.h"
-
-        if sys.platform == 'darwin':
-            if is_macosx_sdk_path(f):
-                sysroot = macosx_sdk_root()
-                f = os.path.join(sysroot, f[1:])
-
-        if os.path.exists(f) and not db_incs:
-            data = open(f).read()
-            m = re.search(r"#s*define\s+HASHVERSION\s+2\s*", data)
-            if m is not None:
-                # bingo - old version used hash file format version 2
-                ### XXX this should be fixed to not be platform-dependent
-                ### but I don't have direct access to an osf1 platform and
-                ### seemed to be muffing the search somehow
-                libraries = platform == "osf1" and ['db'] or None
-                if libraries is not None:
-                    exts.append(Extension('bsddb185', ['bsddbmodule.c'],
-                                          libraries=libraries))
-                else:
-                    exts.append(Extension('bsddb185', ['bsddbmodule.c']))
-            else:
-                missing.append('bsddb185')
-        else:
-            missing.append('bsddb185')
-
-        dbm_order = ['gdbm']
-        # The standard Unix dbm module:
-        if platform not in ['cygwin']:
-            config_args = [arg.strip("'")
-                           for arg in sysconfig.get_config_var("CONFIG_ARGS").split()]
-            dbm_args = [arg for arg in config_args
-                        if arg.startswith('--with-dbmliborder=')]
-            if dbm_args:
-                dbm_order = [arg.split('=')[-1] for arg in dbm_args][-1].split(":")
-            else:
-                dbm_order = "ndbm:gdbm:bdb".split(":")
-            dbmext = None
-            for cand in dbm_order:
-                if cand == "ndbm":
-                    if find_file("ndbm.h", inc_dirs, []) is not None:
-                        # Some systems have -lndbm, others don't
-                        if self.compiler.find_library_file(lib_dirs,
-                                                               'ndbm'):
-                            ndbm_libs = ['ndbm']
-                        else:
-                            ndbm_libs = []
-                        print "building dbm using ndbm"
-                        dbmext = Extension('dbm', ['dbmmodule.c'],
-                                           define_macros=[
-                                               ('HAVE_NDBM_H',None),
-                                               ],
-                                           libraries=ndbm_libs)
-                        break
-
-                elif cand == "gdbm":
-                    if self.compiler.find_library_file(lib_dirs, 'gdbm'):
-                        gdbm_libs = ['gdbm']
-                        if self.compiler.find_library_file(lib_dirs,
-                                                               'gdbm_compat'):
-                            gdbm_libs.append('gdbm_compat')
-                        if find_file("gdbm/ndbm.h", inc_dirs, []) is not None:
-                            print "building dbm using gdbm"
-                            dbmext = Extension(
-                                'dbm', ['dbmmodule.c'],
-                                define_macros=[
-                                    ('HAVE_GDBM_NDBM_H', None),
-                                    ],
-                                libraries = gdbm_libs)
-                            break
-                        if find_file("gdbm-ndbm.h", inc_dirs, []) is not None:
-                            print "building dbm using gdbm"
-                            dbmext = Extension(
-                                'dbm', ['dbmmodule.c'],
-                                define_macros=[
-                                    ('HAVE_GDBM_DASH_NDBM_H', None),
-                                    ],
-                                libraries = gdbm_libs)
-                            break
-                elif cand == "bdb":
-                    if db_incs is not None:
-                        print "building dbm using bdb"
-                        dbmext = Extension('dbm', ['dbmmodule.c'],
-                                           library_dirs=dblib_dir,
-                                           runtime_library_dirs=dblib_dir,
-                                           include_dirs=db_incs,
-                                           define_macros=[
-                                               ('HAVE_BERKDB_H', None),
-                                               ('DB_DBM_HSEARCH', None),
-                                               ],
-                                           libraries=dblibs)
-                        break
-            if dbmext is not None:
-                exts.append(dbmext)
-            else:
-                missing.append('dbm')
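-
-        # Illustrative only: a hypothetical configure run with
-        # --with-dbmliborder=gdbm:ndbm makes dbm_order == ['gdbm', 'ndbm'],
-        # so gdbm is probed first and ndbm only as a fallback; without the
-        # option the default probe order is ndbm, then gdbm, then bdb.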
-
-        # Anthony Baxter's gdbm module.  GNU dbm(3) will require -lgdbm:
-        if ('gdbm' in dbm_order and
-            self.compiler.find_library_file(lib_dirs, 'gdbm')):
-            exts.append( Extension('gdbm', ['gdbmmodule.c'],
-                                   libraries = ['gdbm'] ) )
-        else:
-            missing.append('gdbm')
-
-        # Unix-only modules
-        if platform not in ['win32']:
-            # Steen Lumholt's termios module
-            exts.append( Extension('termios', ['termios.c']) )
-            # Jeremy Hylton's rlimit interface
-            if platform not in ['atheos']:
-                exts.append( Extension('resource', ['resource.c']) )
-            else:
-                missing.append('resource')
-
-            # Sun yellow pages. Some systems have the functions in libc.
-            if (platform not in ['cygwin', 'atheos', 'qnx6'] and
-                find_file('rpcsvc/yp_prot.h', inc_dirs, []) is not None):
-                if (self.compiler.find_library_file(lib_dirs, 'nsl')):
-                    libs = ['nsl']
-                else:
-                    libs = []
-                exts.append( Extension('nis', ['nismodule.c'],
-                                       libraries = libs) )
-            else:
-                missing.append('nis')
-        else:
-            missing.extend(['nis', 'resource', 'termios'])
-
-        # Curses support, requiring the System V version of curses, often
-        # provided by the ncurses library.
-        panel_library = 'panel'
-        if curses_library.startswith('ncurses'):
-            if curses_library == 'ncursesw':
-                # Bug 1464056: If _curses.so links with ncursesw,
-                # _curses_panel.so must link with panelw.
-                panel_library = 'panelw'
-            curses_libs = [curses_library]
-            exts.append( Extension('_curses', ['_cursesmodule.c'],
-                                   libraries = curses_libs) )
-        elif curses_library == 'curses' and platform != 'darwin':
-            # OSX has an old Berkeley curses, not good enough for
-            # the _curses module.
-            if (self.compiler.find_library_file(lib_dirs, 'terminfo')):
-                curses_libs = ['curses', 'terminfo']
-            elif (self.compiler.find_library_file(lib_dirs, 'termcap')):
-                curses_libs = ['curses', 'termcap']
-            else:
-                curses_libs = ['curses']
-
-            exts.append( Extension('_curses', ['_cursesmodule.c'],
-                                   libraries = curses_libs) )
-        else:
-            missing.append('_curses')
-
-        # If the curses module is enabled, check for the panel module
-        if (module_enabled(exts, '_curses') and
-            self.compiler.find_library_file(lib_dirs, panel_library)):
-            exts.append( Extension('_curses_panel', ['_curses_panel.c'],
-                                   libraries = [panel_library] + curses_libs) )
-        else:
-            missing.append('_curses_panel')
-
-        # Andrew Kuchling's zlib module.  Note that some versions of zlib
-        # 1.1.3 have security problems.  See CERT Advisory CA-2002-07:
-        # http://www.cert.org/advisories/CA-2002-07.html
-        #
-        # zlib 1.1.4 is fixed, but at least one vendor (RedHat) has decided to
-        # patch its zlib 1.1.3 package instead of upgrading to 1.1.4.  For
-        # now, we still accept 1.1.3, because we think it's difficult to
-        # exploit this in Python, and we'd rather make it RedHat's problem
-        # than our problem <wink>.
-        #
-        # You can upgrade zlib to version 1.1.4 yourself by going to
-        # http://www.gzip.org/zlib/
-        zlib_inc = find_file('zlib.h', [], inc_dirs)
-        have_zlib = False
-        if zlib_inc is not None:
-            zlib_h = zlib_inc[0] + '/zlib.h'
-            version = '"0.0.0"'
-            version_req = '"1.1.3"'
-            fp = open(zlib_h)
-            while 1:
-                line = fp.readline()
-                if not line:
-                    break
-                if line.startswith('#define ZLIB_VERSION'):
-                    version = line.split()[2]
-                    break
-            if version >= version_req:
-                if (self.compiler.find_library_file(lib_dirs, 'z')):
-                    if sys.platform == "darwin":
-                        zlib_extra_link_args = ('-Wl,-search_paths_first',)
-                    else:
-                        zlib_extra_link_args = ()
-                    exts.append( Extension('zlib', ['zlibmodule.c'],
-                                           libraries = ['z'],
-                                           extra_link_args = zlib_extra_link_args))
-                    have_zlib = True
-                else:
-                    missing.append('zlib')
-            else:
-                missing.append('zlib')
-        else:
-            missing.append('zlib')
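-
-        # A minimal illustrative helper (hypothetical, not called anywhere):
-        # it mirrors the parse step above, returning the quoted ZLIB_VERSION
-        # token from zlib.h, e.g. '"1.2.8"'.  Note that the version test
-        # above then compares the quoted strings lexicographically.
-        def _read_zlib_version(zlib_h_path):
-            with open(zlib_h_path) as header:
-                for line in header:
-                    if line.startswith('#define ZLIB_VERSION'):
-                        return line.split()[2]
-            return None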
-
-        # Helper module for various ascii-encoders.  Uses zlib for an optimized
-        # crc32 if we have it.  Otherwise binascii uses its own.
-        if have_zlib:
-            extra_compile_args = ['-DUSE_ZLIB_CRC32']
-            libraries = ['z']
-            extra_link_args = zlib_extra_link_args
-        else:
-            extra_compile_args = []
-            libraries = []
-            extra_link_args = []
-        exts.append( Extension('binascii', ['binascii.c'],
-                               extra_compile_args = extra_compile_args,
-                               libraries = libraries,
-                               extra_link_args = extra_link_args) )
-
-        # Gustavo Niemeyer's bz2 module.
-        if (self.compiler.find_library_file(lib_dirs, 'bz2')):
-            if sys.platform == "darwin":
-                bz2_extra_link_args = ('-Wl,-search_paths_first',)
-            else:
-                bz2_extra_link_args = ()
-            exts.append( Extension('bz2', ['bz2module.c'],
-                                   libraries = ['bz2'],
-                                   extra_link_args = bz2_extra_link_args) )
-        else:
-            missing.append('bz2')
-
-        # Interface to the Expat XML parser
-        #
-        # Expat was written by James Clark and is now maintained by a group of
-        # developers on SourceForge; see www.libexpat.org for more information.
-        # The pyexpat module was written by Paul Prescod after a prototype by
-        # Jack Jansen.  The Expat source is included in Modules/expat/.  Usage
-        # of a system shared libexpat.so is possible with --with-system-expat
-        # configure option.
-        #
-        # More information on Expat can be found at www.libexpat.org.
-        #
-        if '--with-system-expat' in sysconfig.get_config_var("CONFIG_ARGS"):
-            expat_inc = []
-            define_macros = []
-            expat_lib = ['expat']
-            expat_sources = []
-        else:
-            expat_inc = [os.path.join(os.getcwd(), srcdir, 'Modules', 'expat')]
-            define_macros = [
-                ('HAVE_EXPAT_CONFIG_H', '1'),
-            ]
-            expat_lib = []
-            expat_sources = ['expat/xmlparse.c',
-                             'expat/xmlrole.c',
-                             'expat/xmltok.c']
-
-        exts.append(Extension('pyexpat',
-                              define_macros = define_macros,
-                              include_dirs = expat_inc,
-                              libraries = expat_lib,
-                              sources = ['pyexpat.c'] + expat_sources
-                              ))
-
-        # Fredrik Lundh's cElementTree module.  Note that this also
-        # uses expat (via the CAPI hook in pyexpat).
-
-        if os.path.isfile(os.path.join(srcdir, 'Modules', '_elementtree.c')):
-            define_macros.append(('USE_PYEXPAT_CAPI', None))
-            exts.append(Extension('_elementtree',
-                                  define_macros = define_macros,
-                                  include_dirs = expat_inc,
-                                  libraries = expat_lib,
-                                  sources = ['_elementtree.c'],
-                                  ))
-        else:
-            missing.append('_elementtree')
-
-        # Hye-Shik Chang's CJKCodecs modules.
-        if have_unicode:
-            exts.append(Extension('_multibytecodec',
-                                  ['cjkcodecs/multibytecodec.c']))
-            for loc in ('kr', 'jp', 'cn', 'tw', 'hk', 'iso2022'):
-                exts.append(Extension('_codecs_%s' % loc,
-                                      ['cjkcodecs/_codecs_%s.c' % loc]))
-        else:
-            missing.append('_multibytecodec')
-            for loc in ('kr', 'jp', 'cn', 'tw', 'hk', 'iso2022'):
-                missing.append('_codecs_%s' % loc)
-
-        # Dynamic loading module
-        if sys.maxint == 0x7fffffff:
-            # This requires sizeof(int) == sizeof(long) == sizeof(char*)
-            dl_inc = find_file('dlfcn.h', [], inc_dirs)
-            if (dl_inc is not None) and (platform not in ['atheos']):
-                exts.append( Extension('dl', ['dlmodule.c']) )
-            else:
-                missing.append('dl')
-        else:
-            missing.append('dl')
-
-        # Thomas Heller's _ctypes module
-        self.detect_ctypes(inc_dirs, lib_dirs)
-
-        # Richard Oudkerk's multiprocessing module
-        if platform == 'win32':             # Windows
-            macros = dict()
-            libraries = ['ws2_32']
-
-        elif platform == 'darwin':          # Mac OSX
-            macros = dict()
-            libraries = []
-
-        elif platform == 'cygwin':          # Cygwin
-            macros = dict()
-            libraries = []
-
-        elif platform in ('freebsd4', 'freebsd5', 'freebsd6', 'freebsd7', 'freebsd8'):
-            # FreeBSD's P1003.1b semaphore support is very experimental
-            # and has many known problems (as of June 2008).
-            macros = dict()
-            libraries = []
-
-        elif platform.startswith('openbsd'):
-            macros = dict()
-            libraries = []
-
-        elif platform.startswith('netbsd'):
-            macros = dict()
-            libraries = []
-
-        else:                                   # Linux and other unices
-            macros = dict()
-            libraries = ['rt']
-
-        if platform == 'win32':
-            multiprocessing_srcs = [ '_multiprocessing/multiprocessing.c',
-                                     '_multiprocessing/semaphore.c',
-                                     '_multiprocessing/pipe_connection.c',
-                                     '_multiprocessing/socket_connection.c',
-                                     '_multiprocessing/win32_functions.c'
-                                   ]
-
-        else:
-            multiprocessing_srcs = [ '_multiprocessing/multiprocessing.c',
-                                     '_multiprocessing/socket_connection.c'
-                                   ]
-            if (sysconfig.get_config_var('HAVE_SEM_OPEN') and not
-                sysconfig.get_config_var('POSIX_SEMAPHORES_NOT_ENABLED')):
-                multiprocessing_srcs.append('_multiprocessing/semaphore.c')
-
-        if sysconfig.get_config_var('WITH_THREAD'):
-            exts.append ( Extension('_multiprocessing', multiprocessing_srcs,
-                                    define_macros=macros.items(),
-                                    include_dirs=["Modules/_multiprocessing"]))
-        else:
-            missing.append('_multiprocessing')
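-
-        # In short: Windows links ws2_32 for its pipe/socket connections;
-        # POSIX platforms always get the socket-based connection, compile
-        # semaphore.c only where sem_open() is usable (HAVE_SEM_OPEN set and
-        # POSIX_SEMAPHORES_NOT_ENABLED unset), and Linux and other unices
-        # additionally link librt for the POSIX semaphore symbols.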
-
-        # End multiprocessing
-
-
-        # Platform-specific libraries
-        if platform == 'linux2':
-            # Linux-specific modules
-            exts.append( Extension('linuxaudiodev', ['linuxaudiodev.c']) )
-        else:
-            missing.append('linuxaudiodev')
-
-        if (platform in ('linux2', 'freebsd4', 'freebsd5', 'freebsd6',
-                        'freebsd7', 'freebsd8')
-            or platform.startswith("gnukfreebsd")):
-            exts.append( Extension('ossaudiodev', ['ossaudiodev.c']) )
-        else:
-            missing.append('ossaudiodev')
-
-        if platform == 'sunos5':
-            # SunOS specific modules
-            exts.append( Extension('sunaudiodev', ['sunaudiodev.c']) )
-        else:
-            missing.append('sunaudiodev')
-
-        if platform == 'darwin':
-            # _scproxy
-            exts.append(Extension("_scproxy", [os.path.join(srcdir, "Mac/Modules/_scproxy.c")],
-                extra_link_args= [
-                    '-framework', 'SystemConfiguration',
-                    '-framework', 'CoreFoundation'
-                ]))
-
-
-        if platform == 'darwin' and ("--disable-toolbox-glue" not in
-                sysconfig.get_config_var("CONFIG_ARGS")):
-
-            if int(os.uname()[2].split('.')[0]) >= 8:
-                # We're on Mac OS X 10.4 or later, the compiler should
-                # support '-Wno-deprecated-declarations'. This will
-                # suppress deprecation warnings for the Carbon extensions,
-                # these extensions wrap the Carbon APIs and even those
-                # parts that are deprecated.
-                carbon_extra_compile_args = ['-Wno-deprecated-declarations']
-            else:
-                carbon_extra_compile_args = []
-
-            # Mac OS X specific modules.
-            def macSrcExists(name1, name2=''):
-                if not name1:
-                    return None
-                names = (name1,)
-                if name2:
-                    names = (name1, name2)
-                path = os.path.join(srcdir, 'Mac', 'Modules', *names)
-                return os.path.exists(path)
-
-            def addMacExtension(name, kwds, extra_srcs=[]):
-                dirname = ''
-                if name[0] == '_':
-                    dirname = name[1:].lower()
-                cname = name + '.c'
-                cmodulename = name + 'module.c'
-                # Check for NNN.c, NNNmodule.c, _nnn/NNN.c, _nnn/NNNmodule.c
-                if macSrcExists(cname):
-                    srcs = [cname]
-                elif macSrcExists(cmodulename):
-                    srcs = [cmodulename]
-                elif macSrcExists(dirname, cname):
-                    # XXX(nnorwitz): If all the names ended with module, we
-                    # wouldn't need this condition.  ibcarbon is the only one.
-                    srcs = [os.path.join(dirname, cname)]
-                elif macSrcExists(dirname, cmodulename):
-                    srcs = [os.path.join(dirname, cmodulename)]
-                else:
-                    raise RuntimeError("%s not found" % name)
-
-                # Here's the whole point:  add the extension with sources
-                exts.append(Extension(name, srcs + extra_srcs, **kwds))
-
-            # Core Foundation
-            core_kwds = {'extra_compile_args': carbon_extra_compile_args,
-                         'extra_link_args': ['-framework', 'CoreFoundation'],
-                        }
-            addMacExtension('_CF', core_kwds, ['cf/pycfbridge.c'])
-            addMacExtension('autoGIL', core_kwds)
-
-
-
-            # Carbon
-            carbon_kwds = {'extra_compile_args': carbon_extra_compile_args,
-                           'extra_link_args': ['-framework', 'Carbon'],
-                          }
-            CARBON_EXTS = ['ColorPicker', 'gestalt', 'MacOS', 'Nav',
-                           'OSATerminology', 'icglue',
-                           # All these are in subdirs
-                           '_AE', '_AH', '_App', '_CarbonEvt', '_Cm', '_Ctl',
-                           '_Dlg', '_Drag', '_Evt', '_File', '_Folder', '_Fm',
-                           '_Help', '_Icn', '_IBCarbon', '_List',
-                           '_Menu', '_Mlte', '_OSA', '_Res', '_Qd', '_Qdoffs',
-                           '_Scrap', '_Snd', '_TE',
-                          ]
-            for name in CARBON_EXTS:
-                addMacExtension(name, carbon_kwds)
-
-            # Workaround for a bug in the version of gcc shipped with Xcode 3.
-            # The _Win extension should build just like the other Carbon extensions, but
-            # this actually results in a hard crash of the linker.
-            #
-            if '-arch ppc64' in cflags and '-arch ppc' in cflags:
-                win_kwds = {'extra_compile_args': carbon_extra_compile_args + ['-arch', 'i386', '-arch', 'ppc'],
-                               'extra_link_args': ['-framework', 'Carbon', '-arch', 'i386', '-arch', 'ppc'],
-                           }
-                addMacExtension('_Win', win_kwds)
-            else:
-                addMacExtension('_Win', carbon_kwds)
-
-
-            # Application Services & QuickTime
-            app_kwds = {'extra_compile_args': carbon_extra_compile_args,
-                        'extra_link_args': ['-framework','ApplicationServices'],
-                       }
-            addMacExtension('_Launch', app_kwds)
-            addMacExtension('_CG', app_kwds)
-
-            exts.append( Extension('_Qt', ['qt/_Qtmodule.c'],
-                        extra_compile_args=carbon_extra_compile_args,
-                        extra_link_args=['-framework', 'QuickTime',
-                                     '-framework', 'Carbon']) )
-
-
-        self.extensions.extend(exts)
-
-        # Call the method for detecting whether _tkinter can be compiled
-        self.detect_tkinter(inc_dirs, lib_dirs)
-
-        if '_tkinter' not in [e.name for e in self.extensions]:
-            missing.append('_tkinter')
-
-        return missing
-
-    def detect_tkinter_darwin(self, inc_dirs, lib_dirs):
-        # The _tkinter module, using frameworks. Since frameworks are quite
-        # different the UNIX search logic is not sharable.
-        from os.path import join, exists
-        framework_dirs = [
-            '/Library/Frameworks',
-            '/System/Library/Frameworks/',
-            # Use relative components here: os.path.join discards $HOME
-            # entirely when the joined path segment is absolute.
-            join(os.getenv('HOME'), 'Library', 'Frameworks')
-        ]
-
-        sysroot = macosx_sdk_root()
-
-        # Find the directory that contains the Tcl.framework and Tk.framework
-        # bundles.
-        # XXX distutils should support -F!
-        for F in framework_dirs:
-            # both Tcl.framework and Tk.framework should be present
-
-
-            for fw in 'Tcl', 'Tk':
-                if is_macosx_sdk_path(F):
-                    if not exists(join(sysroot, F[1:], fw + '.framework')):
-                        break
-                else:
-                    if not exists(join(F, fw + '.framework')):
-                        break
-            else:
-                # OK, F is now a directory containing both frameworks.
-                # Continue building.
-                break
-        else:
-            # Tk and Tcl frameworks not found. Normal "unix" tkinter search
-            # will now resume.
-            return 0
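-
-        # The search above uses Python's for/else twice: an else clause on a
-        # loop runs only when the loop finished without hitting 'break'.  A
-        # minimal standalone sketch of the same pattern (hypothetical, not
-        # called anywhere):
-        def _first_dir_with_both(candidates, names):
-            for base in candidates:
-                for name in names:
-                    if not exists(join(base, name + '.framework')):
-                        break      # this candidate lacks a framework
-                else:
-                    return base    # inner loop did not break: all found
-            return None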
-
-        # For 8.4a2, we must add -I options that point inside the Tcl and Tk
-        # frameworks. In later releases we should hopefully be able to pass
-        # the -F option to gcc, which specifies a framework lookup path.
-        #
-        include_dirs = [
-            join(F, fw + '.framework', H)
-            for fw in 'Tcl', 'Tk'
-            for H in 'Headers', 'Versions/Current/PrivateHeaders'
-        ]
-
-        # For 8.4a2, the X11 headers are not included. Rather than include a
-        # complicated search, this is a hard-coded path. It could bail out
-        # if X11 libs are not found...
-        include_dirs.append('/usr/X11R6/include')
-        frameworks = ['-framework', 'Tcl', '-framework', 'Tk']
-
-        # None of the existing framework builds of Tcl/Tk support 64-bit
-        # architectures.
-        cflags = sysconfig.get_config_vars('CFLAGS')[0]
-        archs = re.findall('-arch\s+(\w+)', cflags)
-
-        if is_macosx_sdk_path(F):
-            fp = os.popen("file %s/Tk.framework/Tk | grep 'for architecture'"%(os.path.join(sysroot, F[1:]),))
-        else:
-            fp = os.popen("file %s/Tk.framework/Tk | grep 'for architecture'"%(F,))
-
-        detected_archs = []
-        for ln in fp:
-            a = ln.split()[-1]
-            if a in archs:
-                detected_archs.append(ln.split()[-1])
-        fp.close()
-
-        for a in detected_archs:
-            frameworks.append('-arch')
-            frameworks.append(a)
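-
-        # A matched line from 'file' looks like:
-        #   Tk (for architecture ppc):  Mach-O dynamically linked shared library ppc
-        # so the architecture name is the last whitespace-separated token,
-        # which is why split()[-1] suffices above.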
-
-        ext = Extension('_tkinter', ['_tkinter.c', 'tkappinit.c'],
-                        define_macros=[('WITH_APPINIT', 1)],
-                        include_dirs = include_dirs,
-                        libraries = [],
-                        extra_compile_args = frameworks[2:],
-                        extra_link_args = frameworks,
-                        )
-        self.extensions.append(ext)
-        return 1
-
-
-    def detect_tkinter(self, inc_dirs, lib_dirs):
-        # The _tkinter module.
-
-        # Rather than complicate the code below, detecting and building
-        # AquaTk is a separate method. Only one Tkinter will be built on
-        # Darwin - either AquaTk, if it is found, or X11 based Tk.
-        platform = self.get_platform()
-        ## PCMDI changes: look for the AQUA_CDAT env variable to decide
-        if os.environ.get("AQUA_CDAT", "no") == "yes":
-            if (platform == 'darwin' and
-                self.detect_tkinter_darwin(inc_dirs, lib_dirs)):
-                return
-        ## End of PCMDI changes (we just added the if test).
-
-        # Assume we haven't found any of the libraries or include files
-        # The versions with dots are used on Unix, and the versions without
-        # dots on Windows, for detection by cygwin.
-        tcllib = tklib = tcl_includes = tk_includes = None
-        for version in ['8.6', '86', '8.5', '85', '8.4', '84', '8.3', '83',
-                        '8.2', '82', '8.1', '81', '8.0', '80']:
-            tklib = self.compiler.find_library_file(lib_dirs,
-                                                        'tk' + version)
-            tcllib = self.compiler.find_library_file(lib_dirs,
-                                                         'tcl' + version)
-            if tklib and tcllib:
-                # Exit the loop when we've found the Tcl/Tk libraries
-                break
-
-        # Now check for the header files
-        if tklib and tcllib:
-            # Check for the include files on Debian and {Free,Open}BSD, where
-            # they're put in /usr/include/{tcl,tk}X.Y
-            dotversion = version
-            if '.' not in dotversion and "bsd" in sys.platform.lower():
-                # OpenBSD and FreeBSD use Tcl/Tk library names like libtcl83.a,
-                # but the include subdirs are named like .../include/tcl8.3.
-                dotversion = dotversion[:-1] + '.' + dotversion[-1]
-            tcl_include_sub = []
-            tk_include_sub = []
-            for dir in inc_dirs:
-                tcl_include_sub += [dir + os.sep + "tcl" + dotversion]
-                tk_include_sub += [dir + os.sep + "tk" + dotversion]
-            tk_include_sub += tcl_include_sub
-            tcl_includes = find_file('tcl.h', inc_dirs, tcl_include_sub)
-            tk_includes = find_file('tk.h', inc_dirs, tk_include_sub)
-
-        if (tcllib is None or tklib is None or
-            tcl_includes is None or tk_includes is None):
-            self.announce("INFO: Can't locate Tcl/Tk libs and/or headers", 2)
-            return
-
-        # OK... everything seems to be present for Tcl/Tk.
-
-        include_dirs = [] ; libs = [] ; defs = [] ; added_lib_dirs = []
-        for dir in tcl_includes + tk_includes:
-            if dir not in include_dirs:
-                include_dirs.append(dir)
-
-        # Check for various platform-specific directories
-        if platform == 'sunos5':
-            include_dirs.append('/usr/openwin/include')
-            added_lib_dirs.append('/usr/openwin/lib')
-        elif os.path.exists('/usr/X11R6/include'):
-            include_dirs.append('/usr/X11R6/include')
-            added_lib_dirs.append('/usr/X11R6/lib64')
-            added_lib_dirs.append('/usr/X11R6/lib')
-        elif os.path.exists('/usr/X11R5/include'):
-            include_dirs.append('/usr/X11R5/include')
-            added_lib_dirs.append('/usr/X11R5/lib')
-        else:
-            # Assume default location for X11
-            include_dirs.append('/usr/X11/include')
-            added_lib_dirs.append('/usr/X11/lib')
-
-        # If Cygwin, then verify that X is installed before proceeding
-        if platform == 'cygwin':
-            x11_inc = find_file('X11/Xlib.h', [], include_dirs)
-            if x11_inc is None:
-                return
-
-        # Check for BLT extension
-        if self.compiler.find_library_file(lib_dirs + added_lib_dirs,
-                                               'BLT8.0'):
-            defs.append( ('WITH_BLT', 1) )
-            libs.append('BLT8.0')
-        elif self.compiler.find_library_file(lib_dirs + added_lib_dirs,
-                                                'BLT'):
-            defs.append( ('WITH_BLT', 1) )
-            libs.append('BLT')
-
-        # Add the Tcl/Tk libraries
-        libs.append('tk'+ version)
-        libs.append('tcl'+ version)
-
-        if platform in ['aix3', 'aix4']:
-            libs.append('ld')
-
-        # Finally, link with the X11 libraries (not appropriate on cygwin)
-        if platform != "cygwin":
-            libs.append('X11')
-
-        ext = Extension('_tkinter', ['_tkinter.c', 'tkappinit.c'],
-                        define_macros=[('WITH_APPINIT', 1)] + defs,
-                        include_dirs = include_dirs,
-                        libraries = libs,
-                        library_dirs = added_lib_dirs,
-                        )
-        self.extensions.append(ext)
-
-##         # Uncomment these lines if you want to play with xxmodule.c
-##         ext = Extension('xx', ['xxmodule.c'])
-##         self.extensions.append(ext)
-
-        # XXX handle these, but how to detect?
-        # *** Uncomment and edit for PIL (TkImaging) extension only:
-        #       -DWITH_PIL -I../Extensions/Imaging/libImaging  tkImaging.c \
-        # *** Uncomment and edit for TOGL extension only:
-        #       -DWITH_TOGL togl.c \
-        # *** Uncomment these for TOGL extension only:
-        #       -lGL -lGLU -lXext -lXmu \
-
-    def configure_ctypes_darwin(self, ext):
-        # Darwin (OS X) uses preconfigured files, in
-        # the Modules/_ctypes/libffi_osx directory.
-        srcdir = sysconfig.get_config_var('srcdir')
-        ffi_srcdir = os.path.abspath(os.path.join(srcdir, 'Modules',
-                                                  '_ctypes', 'libffi_osx'))
-        sources = [os.path.join(ffi_srcdir, p)
-                   for p in ['ffi.c',
-                             'x86/darwin64.S',
-                             'x86/x86-darwin.S',
-                             'x86/x86-ffi_darwin.c',
-                             'x86/x86-ffi64.c',
-                             'powerpc/ppc-darwin.S',
-                             'powerpc/ppc-darwin_closure.S',
-                             'powerpc/ppc-ffi_darwin.c',
-                             'powerpc/ppc64-darwin_closure.S',
-                             ]]
-
-        # Add .S (preprocessed assembly) to C compiler source extensions.
-        self.compiler.src_extensions.append('.S')
-
-        include_dirs = [os.path.join(ffi_srcdir, 'include'),
-                        os.path.join(ffi_srcdir, 'powerpc')]
-        ext.include_dirs.extend(include_dirs)
-        ext.sources.extend(sources)
-        return True
-
-    def configure_ctypes(self, ext):
-        if not self.use_system_libffi:
-            if sys.platform == 'darwin':
-                return self.configure_ctypes_darwin(ext)
-
-            srcdir = sysconfig.get_config_var('srcdir')
-            ffi_builddir = os.path.join(self.build_temp, 'libffi')
-            ffi_srcdir = os.path.abspath(os.path.join(srcdir, 'Modules',
-                                         '_ctypes', 'libffi'))
-            ffi_configfile = os.path.join(ffi_builddir, 'fficonfig.py')
-
-            from distutils.dep_util import newer_group
-
-            config_sources = [os.path.join(ffi_srcdir, fname)
-                              for fname in os.listdir(ffi_srcdir)
-                              if os.path.isfile(os.path.join(ffi_srcdir, fname))]
-            if self.force or newer_group(config_sources,
-                                         ffi_configfile):
-                from distutils.dir_util import mkpath
-                mkpath(ffi_builddir)
-                config_args = []
-
-                # Pass empty CFLAGS because we'll just append the resulting
-                # CFLAGS to Python's; -g or -O2 is to be avoided.
-                cmd = "cd %s && env CFLAGS='' '%s/configure' %s" \
-                      % (ffi_builddir, ffi_srcdir, " ".join(config_args))
-
-                res = os.system(cmd)
-                if res or not os.path.exists(ffi_configfile):
-                    print "Failed to configure _ctypes module"
-                    return False
-
-            fficonfig = {}
-            with open(ffi_configfile) as f:
-                exec f in fficonfig
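-            # (Python 2 'exec ... in ...' statement: runs fficonfig.py with
-            # the fficonfig dict as its globals, so names such as
-            # 'ffi_sources' and 'ffi_cflags' defined there become keys.)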
-
-            # Add .S (preprocessed assembly) to C compiler source extensions.
-            self.compiler.src_extensions.append('.S')
-
-            include_dirs = [os.path.join(ffi_builddir, 'include'),
-                            ffi_builddir,
-                            os.path.join(ffi_srcdir, 'src')]
-            extra_compile_args = fficonfig['ffi_cflags'].split()
-
-            ext.sources.extend(os.path.join(ffi_srcdir, f) for f in
-                               fficonfig['ffi_sources'])
-            ext.include_dirs.extend(include_dirs)
-            ext.extra_compile_args.extend(extra_compile_args)
-        return True
-
-    def detect_ctypes(self, inc_dirs, lib_dirs):
-        self.use_system_libffi = False
-        include_dirs = []
-        extra_compile_args = []
-        extra_link_args = []
-        sources = ['_ctypes/_ctypes.c',
-                   '_ctypes/callbacks.c',
-                   '_ctypes/callproc.c',
-                   '_ctypes/stgdict.c',
-                   '_ctypes/cfield.c']
-        depends = ['_ctypes/ctypes.h']
-
-        if sys.platform == 'darwin':
-            sources.append('_ctypes/malloc_closure.c')
-            sources.append('_ctypes/darwin/dlfcn_simple.c')
-            extra_compile_args.append('-DMACOSX')
-            include_dirs.append('_ctypes/darwin')
-# XXX Is this still needed?
-##            extra_link_args.extend(['-read_only_relocs', 'warning'])
-
-        elif sys.platform == 'sunos5':
-            # XXX This shouldn't be necessary; it appears that some
-            # of the assembler code is non-PIC (i.e. it has relocations
-            # when it shouldn't). The proper fix would be to rewrite
-            # the assembler code to be PIC.
-            # This only works with GCC; the Sun compiler likely refuses
-            # this option. If you want to compile ctypes with the Sun
-            # compiler, please research a proper solution, instead of
-            # finding some -z option for the Sun compiler.
-            extra_link_args.append('-mimpure-text')
-
-        elif sys.platform.startswith('hp-ux'):
-            extra_link_args.append('-fPIC')
-
-        ext = Extension('_ctypes',
-                        include_dirs=include_dirs,
-                        extra_compile_args=extra_compile_args,
-                        extra_link_args=extra_link_args,
-                        libraries=[],
-                        sources=sources,
-                        depends=depends)
-        ext_test = Extension('_ctypes_test',
-                             sources=['_ctypes/_ctypes_test.c'])
-        self.extensions.extend([ext, ext_test])
-
-        if not '--with-system-ffi' in sysconfig.get_config_var("CONFIG_ARGS"):
-            return
-
-        if sys.platform == 'darwin':
-            # OS X 10.5 comes with libffi.dylib; the include files are
-            # in /usr/include/ffi
-            inc_dirs.append('/usr/include/ffi')
-
-        ffi_inc = [sysconfig.get_config_var("LIBFFI_INCLUDEDIR")]
-        if not ffi_inc or ffi_inc[0] == '':
-            ffi_inc = find_file('ffi.h', [], inc_dirs)
-        if ffi_inc is not None:
-            ffi_h = ffi_inc[0] + '/ffi.h'
-            fp = open(ffi_h)
-            while 1:
-                line = fp.readline()
-                if not line:
-                    ffi_inc = None
-                    break
-                if line.startswith('#define LIBFFI_H'):
-                    break
-        ffi_lib = None
-        if ffi_inc is not None:
-            for lib_name in ('ffi_convenience', 'ffi_pic', 'ffi'):
-                if (self.compiler.find_library_file(lib_dirs, lib_name)):
-                    ffi_lib = lib_name
-                    break
-
-        if ffi_inc and ffi_lib:
-            ext.include_dirs.extend(ffi_inc)
-            ext.libraries.append(ffi_lib)
-            self.use_system_libffi = True
-
-
-class PyBuildInstall(install):
-    # Suppress the warning about installation into the lib_dynload
-    # directory, which is not in sys.path when running Python during
-    # installation:
-    def initialize_options (self):
-        install.initialize_options(self)
-        self.warn_dir=0
-
-class PyBuildInstallLib(install_lib):
-    # Do exactly what install_lib does but make sure correct access modes get
-    # set on installed directories and files. All installed files will get
-    # mode 644 unless they are a shared library, in which case they will get
-    # mode 755. All installed directories will get mode 755.
-
-    so_ext = sysconfig.get_config_var("SO")
-
-    def install(self):
-        outfiles = install_lib.install(self)
-        self.set_file_modes(outfiles, 0644, 0755)
-        self.set_dir_modes(self.install_dir, 0755)
-        return outfiles
-
-    def set_file_modes(self, files, defaultMode, sharedLibMode):
-        if not self.is_chmod_supported(): return
-        if not files: return
-
-        for filename in files:
-            if os.path.islink(filename): continue
-            mode = defaultMode
-            if filename.endswith(self.so_ext): mode = sharedLibMode
-            log.info("changing mode of %s to %o", filename, mode)
-            if not self.dry_run: os.chmod(filename, mode)
-
-    def set_dir_modes(self, dirname, mode):
-        if not self.is_chmod_supported(): return
-        os.path.walk(dirname, self.set_dir_modes_visitor, mode)
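-        # (os.path.walk is the Python 2 visitor-style walker, removed in
-        # Python 3; it invokes set_dir_modes_visitor(mode, dirname, names)
-        # once per directory under dirname.)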
-
-    def set_dir_modes_visitor(self, mode, dirname, names):
-        if os.path.islink(dirname): return
-        log.info("changing mode of %s to %o", dirname, mode)
-        if not self.dry_run: os.chmod(dirname, mode)
-
-    def is_chmod_supported(self):
-        return hasattr(os, 'chmod')
-
-SUMMARY = """
-Python is an interpreted, interactive, object-oriented programming
-language. It is often compared to Tcl, Perl, Scheme or Java.
-
-Python combines remarkable power with very clear syntax. It has
-modules, classes, exceptions, very high level dynamic data types, and
-dynamic typing. There are interfaces to many system calls and
-libraries, as well as to various windowing systems (X11, Motif, Tk,
-Mac, MFC). New built-in modules are easily written in C or C++. Python
-is also usable as an extension language for applications that need a
-programmable interface.
-
-The Python implementation is portable: it runs on many brands of UNIX,
-on Windows, DOS, OS/2, Mac, Amiga... If your favorite system isn't
-listed here, it may still be supported, if there's a C compiler for
-it. Ask around on comp.lang.python -- or just try compiling Python
-yourself.
-"""
-
-CLASSIFIERS = """
-Development Status :: 6 - Mature
-License :: OSI Approved :: Python Software Foundation License
-Natural Language :: English
-Programming Language :: C
-Programming Language :: Python
-Topic :: Software Development
-"""
-
-def main():
-    # turn off warnings when deprecated modules are imported
-    import warnings
-    warnings.filterwarnings("ignore",category=DeprecationWarning)
-    setup(# PyPI Metadata (PEP 301)
-          name = "Python",
-          version = sys.version.split()[0],
-          url = "http://www.python.org/%s" % sys.version[:3],
-          maintainer = "Guido van Rossum and the Python community",
-          maintainer_email = "python-dev@python.org",
-          description = "A high-level object-oriented programming language",
-          long_description = SUMMARY.strip(),
-          license = "PSF license",
-          classifiers = filter(None, CLASSIFIERS.split("\n")),
-          platforms = ["Many"],
-
-          # Build info
-          cmdclass = {'build_ext':PyBuildExt, 'install':PyBuildInstall,
-                      'install_lib':PyBuildInstallLib},
-          # The struct module is defined here, because build_ext won't be
-          # called unless there's at least one extension module defined.
-          ext_modules=[Extension('_struct', ['_struct.c'])],
-
-          # Scripts to install
-          scripts = ['Tools/scripts/pydoc', 'Tools/scripts/idle',
-                     'Tools/scripts/2to3',
-                     'Lib/smtpd.py']
-        )
-
-# --install-platlib
-if __name__ == '__main__':
-    main()
diff --git a/pysrc/src/setup-2.7.3.py b/pysrc/src/setup-2.7.3.py
deleted file mode 100644
index 4026128ebd738e2116f44e5b63cf508a6c912f5a..0000000000000000000000000000000000000000
--- a/pysrc/src/setup-2.7.3.py
+++ /dev/null
@@ -1,2094 +0,0 @@
-# Autodetecting setup.py script for building the Python extensions
-#
-
-__version__ = "$Revision$"
-
-import sys, os, imp, re, optparse
-from glob import glob
-from platform import machine as platform_machine
-import sysconfig
-
-from distutils import log
-from distutils import text_file
-from distutils.errors import *
-from distutils.core import Extension, setup
-from distutils.command.build_ext import build_ext
-from distutils.command.install import install
-from distutils.command.install_lib import install_lib
-from distutils.spawn import find_executable
-
-# Were we compiled --with-pydebug or with #define Py_DEBUG?
-COMPILED_WITH_PYDEBUG = hasattr(sys, 'gettotalrefcount')
-
-# This global variable is used to hold the list of modules to be disabled.
-disabled_module_list = []
-
-def add_dir_to_list(dirlist, dir):
-    """Add the directory 'dir' to the list 'dirlist' (at the front) if
-    1) 'dir' is not already in 'dirlist'
-    2) 'dir' actually exists, and is a directory."""
-    if dir is not None and os.path.isdir(dir) and dir not in dirlist:
-        dirlist.insert(0, dir)
-
-def macosx_sdk_root():
-    """
-    Return the directory of the current OSX SDK,
-    or '/' if no SDK was specified.
-    """
-    cflags = sysconfig.get_config_var('CFLAGS')
-    m = re.search(r'-isysroot\s+(\S+)', cflags)
-    if m is None:
-        sysroot = '/'
-    else:
-        sysroot = m.group(1)
-    return sysroot
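-
-# Illustrative sketch (hypothetical, not called anywhere): what the
-# -isysroot pattern in macosx_sdk_root() extracts from a typical CFLAGS.
-def _example_sdk_root():
-    cflags = '-arch x86_64 -isysroot /Developer/SDKs/MacOSX10.6.sdk -g'
-    m = re.search(r'-isysroot\s+(\S+)', cflags)
-    return m.group(1)  # -> '/Developer/SDKs/MacOSX10.6.sdk'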
-
-def is_macosx_sdk_path(path):
-    """
-    Returns True if 'path' can be located in an OSX SDK
-    """
-    return (path.startswith('/usr/') and not path.startswith('/usr/local')) or path.startswith('/System/')
-
-def find_file(filename, std_dirs, paths):
-    """Searches for the directory where a given file is located,
-    and returns a possibly-empty list of additional directories, or None
-    if the file couldn't be found at all.
-
-    'filename' is the name of a file, such as readline.h or libcrypto.a.
-    'std_dirs' is the list of standard system directories; if the
-        file is found in one of them, no additional directives are needed.
-    'paths' is a list of additional locations to check; if the file is
-        found in one of them, the resulting list will contain the directory.
-    """
-    if sys.platform == 'darwin':
-        # Honor the MacOSX SDK setting when one was specified.
-        # An SDK is a directory with the same structure as a real
-        # system, but with only header files and libraries.
-        sysroot = macosx_sdk_root()
-
-    # Check the standard locations
-    for dir in std_dirs:
-        f = os.path.join(dir, filename)
-
-        if sys.platform == 'darwin' and is_macosx_sdk_path(dir):
-            f = os.path.join(sysroot, dir[1:], filename)
-
-        if os.path.exists(f): return []
-
-    # Check the additional directories
-    for dir in paths:
-        f = os.path.join(dir, filename)
-
-        if sys.platform == 'darwin' and is_macosx_sdk_path(dir):
-            f = os.path.join(sysroot, dir[1:], filename)
-
-        if os.path.exists(f):
-            return [dir]
-
-    # Not found anywhere
-    return None
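-
-# Illustrative calls with hypothetical paths:
-#   find_file('zlib.h', ['/usr/include'], ['/opt/include']) returns [] if
-#   zlib.h lives in /usr/include, ['/opt/include'] if it is only found
-#   there, and None if it is missing from both.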
-
-def find_library_file(compiler, libname, std_dirs, paths):
-    result = compiler.find_library_file(std_dirs + paths, libname)
-    if result is None:
-        return None
-
-    if sys.platform == 'darwin':
-        sysroot = macosx_sdk_root()
-
-    # Check whether the found file is in one of the standard directories
-    dirname = os.path.dirname(result)
-    for p in std_dirs:
-        # Ensure path doesn't end with path separator
-        p = p.rstrip(os.sep)
-
-        if sys.platform == 'darwin' and is_macosx_sdk_path(p):
-            if os.path.join(sysroot, p[1:]) == dirname:
-                return [ ]
-
-        if p == dirname:
-            return [ ]
-
-    # Otherwise, it must have been in one of the additional directories,
-    # so we have to figure out which one.
-    for p in paths:
-        # Ensure path doesn't end with path separator
-        p = p.rstrip(os.sep)
-
-        if sys.platform == 'darwin' and is_macosx_sdk_path(p):
-            if os.path.join(sysroot, p[1:]) == dirname:
-                return [ p ]
-
-        if p == dirname:
-            return [p]
-    else:
-        assert False, "Internal error: Path not found in std_dirs or paths"
-
-def module_enabled(extlist, modname):
-    """Returns whether the module 'modname' is present in the list
-    of extensions 'extlist'."""
-    extlist = [ext for ext in extlist if ext.name == modname]
-    return len(extlist)
-
-def find_module_file(module, dirlist):
-    """Find a module in a set of possible folders. If it is not found
-    return the unadorned filename"""
-    dirs = find_file(module, [], dirlist)
-    if not dirs:
-        return module
-    if len(dirs) > 1:
-        log.info("WARNING: multiple copies of %s found" % module)
-    return os.path.join(dirs[0], module)
-
-class PyBuildExt(build_ext):
-
-    def __init__(self, dist):
-        build_ext.__init__(self, dist)
-        self.failed = []
-
-    def build_extensions(self):
-
-        # Detect which modules should be compiled
-        missing = self.detect_modules()
-
-        # Remove modules that are present on the disabled list
-        extensions = [ext for ext in self.extensions
-                      if ext.name not in disabled_module_list]
-        # move ctypes to the end, it depends on other modules
-        ext_map = dict((ext.name, i) for i, ext in enumerate(extensions))
-        if "_ctypes" in ext_map:
-            ctypes = extensions.pop(ext_map["_ctypes"])
-            extensions.append(ctypes)
-        self.extensions = extensions
-
-        # Fix up the autodetected modules, prefixing all the source files
-        # with Modules/ and adding Python's include directory to the path.
-        (srcdir,) = sysconfig.get_config_vars('srcdir')
-        if not srcdir:
-            # Maybe running on Windows but not using CYGWIN?
-            raise ValueError("No source directory; cannot proceed.")
-        srcdir = os.path.abspath(srcdir)
-        moddirlist = [os.path.join(srcdir, 'Modules')]
-
-        # Platform-dependent module source and include directories
-        incdirlist = []
-        platform = self.get_platform()
-        if platform == 'darwin' and ("--disable-toolbox-glue" not in
-            sysconfig.get_config_var("CONFIG_ARGS")):
-            # Mac OS X also includes some mac-specific modules
-            macmoddir = os.path.join(srcdir, 'Mac/Modules')
-            moddirlist.append(macmoddir)
-            incdirlist.append(os.path.join(srcdir, 'Mac/Include'))
-
-        # Fix up the paths for scripts, too
-        self.distribution.scripts = [os.path.join(srcdir, filename)
-                                     for filename in self.distribution.scripts]
-
-        # Python header files
-        headers = [sysconfig.get_config_h_filename()]
-        headers += glob(os.path.join(sysconfig.get_path('platinclude'), "*.h"))
-        for ext in self.extensions[:]:
-            ext.sources = [ find_module_file(filename, moddirlist)
-                            for filename in ext.sources ]
-            if ext.depends is not None:
-                ext.depends = [find_module_file(filename, moddirlist)
-                               for filename in ext.depends]
-            else:
-                ext.depends = []
-            # re-compile extensions if a header file has been changed
-            ext.depends.extend(headers)
-
-            # platform specific include directories
-            ext.include_dirs.extend(incdirlist)
-
-            # If a module has already been built statically,
-            # don't build it here
-            if ext.name in sys.builtin_module_names:
-                self.extensions.remove(ext)
-
-        # Parse Modules/Setup and Modules/Setup.local to figure out which
-        # modules are turned on in the file.
-        remove_modules = []
-        for filename in ('Modules/Setup', 'Modules/Setup.local'):
-            input = text_file.TextFile(filename, join_lines=1)
-            while 1:
-                line = input.readline()
-                if not line: break
-                line = line.split()
-                remove_modules.append(line[0])
-            input.close()
-
-        for ext in self.extensions[:]:
-            if ext.name in remove_modules:
-                self.extensions.remove(ext)
-
-        # When you run "make CC=altcc" or something similar, you really want
-        # those environment variables passed into the setup.py phase.  Here's
-        # a small set of useful ones.
-        compiler = os.environ.get('CC')
-        args = {}
-        # unfortunately, distutils doesn't let us provide separate C and C++
-        # compilers
-        if compiler is not None:
-            (ccshared,cflags) = sysconfig.get_config_vars('CCSHARED','CFLAGS')
-            args['compiler_so'] = compiler + ' ' + ccshared + ' ' + cflags
-        self.compiler.set_executables(**args)
-
-        build_ext.build_extensions(self)
-
-        longest = max([len(e.name) for e in self.extensions])
-        if self.failed:
-            longest = max(longest, max([len(name) for name in self.failed]))
-
-        def print_three_column(lst):
-            lst.sort(key=str.lower)
-            # guarantee zip() doesn't drop anything
-            while len(lst) % 3:
-                lst.append("")
-            for e, f, g in zip(lst[::3], lst[1::3], lst[2::3]):
-                print "%-*s   %-*s   %-*s" % (longest, e, longest, f,
-                                              longest, g)
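-
-        # e.g. print_three_column(['d', 'b', 'a', 'c']) sorts the names,
-        # pads the list to a multiple of three, and prints:
-        #   a   b   c
-        #   d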
-
-        if missing:
-            print
-            print ("Python build finished, but the necessary bits to build "
-                   "these modules were not found:")
-            print_three_column(missing)
-            print ("To find the necessary bits, look in setup.py in"
-                   " detect_modules() for the module's name.")
-            print
-
-        if self.failed:
-            failed = self.failed[:]
-            print
-            print "Failed to build these modules:"
-            print_three_column(failed)
-            print
-
-    def build_extension(self, ext):
-
-        if ext.name == '_ctypes':
-            if not self.configure_ctypes(ext):
-                return
-
-        try:
-            build_ext.build_extension(self, ext)
-        except (CCompilerError, DistutilsError), why:
-            self.announce('WARNING: building of extension "%s" failed: %s' %
-                          (ext.name, sys.exc_info()[1]))
-            self.failed.append(ext.name)
-            return
-        # Workaround for Mac OS X: The Carbon-based modules cannot be
-        # reliably imported into a command-line Python
-        if 'Carbon' in ext.extra_link_args:
-            self.announce(
-                'WARNING: skipping import check for Carbon-based "%s"' %
-                ext.name)
-            return
-
-        if self.get_platform() == 'darwin' and (
-                sys.maxint > 2**32 and '-arch' in ext.extra_link_args):
-            # Don't bother doing an import check when an extension was
-            # built with an explicit '-arch' flag on OSX. That's currently
-            # only used to build 32-bit only extensions in a 4-way
-            # universal build and loading 32-bit code into a 64-bit
-            # process will fail.
-            self.announce(
-                'WARNING: skipping import check for "%s"' %
-                ext.name)
-            return
-
-        # Workaround for Cygwin: Cygwin currently has fork issues when many
-        # modules have been imported
-        if self.get_platform() == 'cygwin':
-            self.announce('WARNING: skipping import check for Cygwin-based "%s"'
-                % ext.name)
-            return
-        ext_filename = os.path.join(
-            self.build_lib,
-            self.get_ext_filename(self.get_ext_fullname(ext.name)))
-        try:
-            imp.load_dynamic(ext.name, ext_filename)
-        except ImportError, why:
-            self.failed.append(ext.name)
-            self.announce('*** WARNING: renaming "%s" since importing it'
-                          ' failed: %s' % (ext.name, why), level=3)
-            assert not self.inplace
-            basename, tail = os.path.splitext(ext_filename)
-            newname = basename + "_failed" + tail
-            if os.path.exists(newname):
-                os.remove(newname)
-            os.rename(ext_filename, newname)
-
-            # XXX -- This relies on a Vile HACK in
-            # distutils.command.build_ext.build_extension().  The
-            # _built_objects attribute is stored there strictly for
-            # use here.
-            # If there is a failure, _built_objects may not be there,
-            # so catch the AttributeError and move on.
-            try:
-                for filename in self._built_objects:
-                    os.remove(filename)
-            except AttributeError:
-                self.announce('unable to remove files (ignored)')
-        except:
-            exc_type, why, tb = sys.exc_info()
-            self.announce('*** WARNING: importing extension "%s" '
-                          'failed with %s: %s' % (ext.name, exc_type, why),
-                          level=3)
-            self.failed.append(ext.name)
-
-    def get_platform(self):
-        # Get value of sys.platform
-        for platform in ['cygwin', 'beos', 'darwin', 'atheos', 'osf1']:
-            if sys.platform.startswith(platform):
-                return platform
-        return sys.platform
-
-    def add_multiarch_paths(self):
-        # Debian/Ubuntu multiarch support.
-        # https://wiki.ubuntu.com/MultiarchSpec
-        if not find_executable('dpkg-architecture'):
-            return
-        tmpfile = os.path.join(self.build_temp, 'multiarch')
-        if not os.path.exists(self.build_temp):
-            os.makedirs(self.build_temp)
-        ret = os.system(
-            'dpkg-architecture -qDEB_HOST_MULTIARCH > %s 2> /dev/null' %
-            tmpfile)
-        try:
-            if ret >> 8 == 0:
-                with open(tmpfile) as fp:
-                    multiarch_path_component = fp.readline().strip()
-                add_dir_to_list(self.compiler.library_dirs,
-                                '/usr/lib/' + multiarch_path_component)
-                add_dir_to_list(self.compiler.include_dirs,
-                                '/usr/include/' + multiarch_path_component)
-        finally:
-            os.unlink(tmpfile)
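-
-    # An equivalent sketch using subprocess instead of a temp file
-    # (hypothetical, not called anywhere): on Debian/Ubuntu,
-    # 'dpkg-architecture -qDEB_HOST_MULTIARCH' prints e.g. 'x86_64-linux-gnu'.
-    def _multiarch_triplet(self):
-        import subprocess
-        try:
-            return subprocess.check_output(
-                ['dpkg-architecture', '-qDEB_HOST_MULTIARCH']).strip()
-        except (OSError, subprocess.CalledProcessError):
-            return None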
-
-    def detect_modules(self):
-        # PCMDI change
-        # Ensure that the place we put tcl/tk/netcdf etc. is always used
-        libbase = os.environ.get('EXTERNALS', os.path.join(sys.prefix, '..', 'Externals'))
-        mylibdir = os.path.join(libbase, 'lib')
-        myincdir = os.path.join(libbase, 'include')
-        add_dir_to_list(self.compiler.library_dirs, mylibdir)
-        add_dir_to_list(self.compiler.include_dirs, myincdir)
-        # end PCMDI change
-        # Ensure that /usr/local is always used
-        add_dir_to_list(self.compiler.library_dirs, '/usr/local/lib')
-        add_dir_to_list(self.compiler.include_dirs, '/usr/local/include')
-        self.add_multiarch_paths()
-
-        # Add paths specified in the environment variables LDFLAGS and
-        # CPPFLAGS for header and library files.
-        # We must get the values from the Makefile and not the environment
-        # directly since an inconsistently reproducible issue comes up where
-        # the environment variable is not set even though the values were passed
-        # into configure and stored in the Makefile (issue found on OS X 10.3).
-        for env_var, arg_name, dir_list in (
-                ('LDFLAGS', '-R', self.compiler.runtime_library_dirs),
-                ('LDFLAGS', '-L', self.compiler.library_dirs),
-                ('CPPFLAGS', '-I', self.compiler.include_dirs)):
-            env_val = sysconfig.get_config_var(env_var)
-            if env_val:
-                # To prevent optparse from raising an exception about any
-                # options in env_val that it doesn't know about we strip out
-                # all double dashes and any dashes followed by a character
-                # that is not for the option we are dealing with.
-                #
-                # Please note that the order of the regex is important!  We
-                # must strip out double-dashes first so that we don't end up
-                # substituting "--Long" with "-Long", which would lead to
-                # "ong" being used as a library directory.
-                env_val = re.sub(r'(^|\s+)-(-|(?!%s))' % arg_name[1],
-                                 ' ', env_val)
-                parser = optparse.OptionParser()
-                # Make sure that allowing args interspersed with options is
-                # allowed
-                parser.allow_interspersed_args = True
-                parser.error = lambda msg: None
-                parser.add_option(arg_name, dest="dirs", action="append")
-                options = parser.parse_args(env_val.split())[0]
-                if options.dirs:
-                    for directory in reversed(options.dirs):
-                        add_dir_to_list(dir_list, directory)
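-
-        # A condensed sketch of the extraction above (hypothetical, not
-        # called anywhere): _dirs_from_flags('-L/opt/lib --x -L/usr/lib')
-        # would return ['/opt/lib', '/usr/lib'].
-        def _dirs_from_flags(env_val, arg_name='-L'):
-            cleaned = re.sub(r'(^|\s+)-(-|(?!%s))' % arg_name[1], ' ', env_val)
-            parser = optparse.OptionParser()
-            parser.allow_interspersed_args = True
-            parser.error = lambda msg: None
-            parser.add_option(arg_name, dest='dirs', action='append')
-            return parser.parse_args(cleaned.split())[0].dirs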
-
-        if os.path.normpath(sys.prefix) != '/usr' \
-                and not sysconfig.get_config_var('PYTHONFRAMEWORK'):
-            # OSX note: Don't add LIBDIR and INCLUDEDIR when building a
-            # framework (PYTHONFRAMEWORK is set), to avoid linking problems when
-            # building a framework with different architectures than
-            # the one that is currently installed (issue #7473)
-            add_dir_to_list(self.compiler.library_dirs,
-                            sysconfig.get_config_var("LIBDIR"))
-            add_dir_to_list(self.compiler.include_dirs,
-                            sysconfig.get_config_var("INCLUDEDIR"))
-
-        try:
-            have_unicode = unicode
-        except NameError:
-            have_unicode = 0
-
-        # lib_dirs and inc_dirs are used to search for files;
-        # if a file is found in one of those directories, it can
-        # be assumed that no additional -I,-L directives are needed.
-        lib_dirs = self.compiler.library_dirs + [
-            '/lib64', '/usr/lib64',
-            '/lib', '/usr/lib', '/usr/lib/x86_64-linux-gnu',
-            ]
-        inc_dirs = self.compiler.include_dirs + ['/usr/include']
-        exts = []
-        missing = []
-
-        config_h = sysconfig.get_config_h_filename()
-        config_h_vars = sysconfig.parse_config_h(open(config_h))
-
-        platform = self.get_platform()
-        srcdir = sysconfig.get_config_var('srcdir')
-
-        # Check for AtheOS which has libraries in non-standard locations
-        if platform == 'atheos':
-            lib_dirs += ['/system/libs', '/atheos/autolnk/lib']
-            lib_dirs += os.getenv('LIBRARY_PATH', '').split(os.pathsep)
-            inc_dirs += ['/system/include', '/atheos/autolnk/include']
-            inc_dirs += os.getenv('C_INCLUDE_PATH', '').split(os.pathsep)
-
-        # OSF/1 and Unixware have some stuff in /usr/ccs/lib (like -ldb)
-        if platform in ['osf1', 'unixware7', 'openunix8']:
-            lib_dirs += ['/usr/ccs/lib']
-
-        if platform == 'darwin':
-            # This should work on any unixy platform ;-)
-            # If the user has bothered specifying additional -I and -L flags
-            # in OPT and LDFLAGS we might as well use them here.
-            #   NOTE: using shlex.split would technically be more correct, but
-            # also gives a bootstrap problem. Let's hope nobody uses directories
-            # with whitespace in the name to store libraries.
-            cflags, ldflags = sysconfig.get_config_vars(
-                    'CFLAGS', 'LDFLAGS')
-            for item in cflags.split():
-                if item.startswith('-I'):
-                    inc_dirs.append(item[2:])
-
-            for item in ldflags.split():
-                if item.startswith('-L'):
-                    lib_dirs.append(item[2:])
-
-        # Check for MacOS X, which doesn't need libm.a at all
-        math_libs = ['m']
-        if platform in ['darwin', 'beos']:
-            math_libs = []
-
-        # XXX Omitted modules: gl, pure, dl, SGI-specific modules
-
-        #
-        # The following modules are all pretty straightforward, and compile
-        # on pretty much any POSIXish platform.
-        #
-
-        # Some modules that are normally always on:
-        #exts.append( Extension('_weakref', ['_weakref.c']) )
-
-        # array objects
-        exts.append( Extension('array', ['arraymodule.c']) )
-        # complex math library functions
-        exts.append( Extension('cmath', ['cmathmodule.c', '_math.c'],
-                               depends=['_math.h'],
-                               libraries=math_libs) )
-        # math library functions, e.g. sin()
-        exts.append( Extension('math',  ['mathmodule.c', '_math.c'],
-                               depends=['_math.h'],
-                               libraries=math_libs) )
-        # fast string operations implemented in C
-        exts.append( Extension('strop', ['stropmodule.c']) )
-        # time operations and variables
-        exts.append( Extension('time', ['timemodule.c'],
-                               libraries=math_libs) )
-        exts.append( Extension('datetime', ['datetimemodule.c', 'timemodule.c'],
-                               libraries=math_libs) )
-        # fast iterator tools implemented in C
-        exts.append( Extension("itertools", ["itertoolsmodule.c"]) )
-        # code that will be builtins in the future, but conflict with the
-        #  current builtins
-        exts.append( Extension('future_builtins', ['future_builtins.c']) )
-        # random number generator implemented in C
-        exts.append( Extension("_random", ["_randommodule.c"]) )
-        # high-performance collections
-        exts.append( Extension("_collections", ["_collectionsmodule.c"]) )
-        # bisect
-        exts.append( Extension("_bisect", ["_bisectmodule.c"]) )
-        # heapq
-        exts.append( Extension("_heapq", ["_heapqmodule.c"]) )
-        # operator.add() and similar goodies
-        exts.append( Extension('operator', ['operator.c']) )
-        # Python 3.1 _io library
-        exts.append( Extension("_io",
-            ["_io/bufferedio.c", "_io/bytesio.c", "_io/fileio.c",
-             "_io/iobase.c", "_io/_iomodule.c", "_io/stringio.c", "_io/textio.c"],
-             depends=["_io/_iomodule.h"], include_dirs=["Modules/_io"]))
-        # _functools
-        exts.append( Extension("_functools", ["_functoolsmodule.c"]) )
-        # _json speedups
-        exts.append( Extension("_json", ["_json.c"]) )
-        # Python C API test module
-        exts.append( Extension('_testcapi', ['_testcapimodule.c'],
-                               depends=['testcapi_long.h']) )
-        # profilers (_lsprof is for cProfile.py)
-        exts.append( Extension('_hotshot', ['_hotshot.c']) )
-        exts.append( Extension('_lsprof', ['_lsprof.c', 'rotatingtree.c']) )
-        # static Unicode character database
-        if have_unicode:
-            exts.append( Extension('unicodedata', ['unicodedata.c']) )
-        else:
-            missing.append('unicodedata')
-        # access to ISO C locale support
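-        # pyconfig.h records whether configure found gettext in a separate
-        # libintl library; if so, _locale must link against it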
-        data = open('pyconfig.h').read()
-        m = re.search(r"#s*define\s+WITH_LIBINTL\s+1\s*", data)
-        if m is not None:
-            locale_libs = ['intl']
-        else:
-            locale_libs = []
-        if platform == 'darwin':
-            locale_extra_link_args = ['-framework', 'CoreFoundation']
-        else:
-            locale_extra_link_args = []
-
-
-        exts.append( Extension('_locale', ['_localemodule.c'],
-                               libraries=locale_libs,
-                               extra_link_args=locale_extra_link_args) )
-
-        # Modules with some UNIX dependencies -- on by default:
-        # (If you have a really backward UNIX, select and socket may not be
-        # supported...)
-
-        # fcntl(2) and ioctl(2)
-        libs = []
-        if (config_h_vars.get('FLOCK_NEEDS_LIBBSD', False)):
-            # May be necessary on AIX for flock function
-            libs = ['bsd']
-        exts.append( Extension('fcntl', ['fcntlmodule.c'], libraries=libs) )
-        # pwd(3)
-        exts.append( Extension('pwd', ['pwdmodule.c']) )
-        # grp(3)
-        exts.append( Extension('grp', ['grpmodule.c']) )
-        # spwd, shadow passwords
-        if (config_h_vars.get('HAVE_GETSPNAM', False) or
-                config_h_vars.get('HAVE_GETSPENT', False)):
-            exts.append( Extension('spwd', ['spwdmodule.c']) )
-        else:
-            missing.append('spwd')
-
-        # select(2); not on ancient System V
-        exts.append( Extension('select', ['selectmodule.c']) )
-
-        # Fred Drake's interface to the Python parser
-        exts.append( Extension('parser', ['parsermodule.c']) )
-
-        # cStringIO and cPickle
-        exts.append( Extension('cStringIO', ['cStringIO.c']) )
-        exts.append( Extension('cPickle', ['cPickle.c']) )
-
-        # Memory-mapped files (also works on Win32).
-        if platform not in ['atheos']:
-            exts.append( Extension('mmap', ['mmapmodule.c']) )
-        else:
-            missing.append('mmap')
-
-        # Lance Ellinghaus's syslog module
-        # syslog daemon interface
-        exts.append( Extension('syslog', ['syslogmodule.c']) )
-
-        # George Neville-Neil's timing module:
-        # Deprecated in PEP 4 http://www.python.org/peps/pep-0004.html
-        # http://mail.python.org/pipermail/python-dev/2006-January/060023.html
-        #exts.append( Extension('timing', ['timingmodule.c']) )
-
-        #
-        # Here ends the simple stuff.  From here on, modules need certain
-        # libraries, are platform-specific, or present other surprises.
-        #
-
-        # Multimedia modules
-        # These don't work for 64-bit platforms!!!
-        # These represent audio samples or images as strings:
-
-        # Operations on audio samples
-        # According to #993173, this one should actually work fine on
-        # 64-bit platforms.
-        exts.append( Extension('audioop', ['audioop.c']) )
-
-        # Disabled on 64-bit platforms
-        if sys.maxint != 9223372036854775807L:
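-            # (sys.maxint is 2**63-1 only on 64-bit builds, so this branch
-            # is taken on 32-bit platforms)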
-            # Operations on images
-            exts.append( Extension('imageop', ['imageop.c']) )
-        else:
-            missing.extend(['imageop'])
-
-        # readline
-        do_readline = self.compiler.find_library_file(lib_dirs, 'readline')
-        readline_termcap_library = ""
-        curses_library = ""
-        # Determine if readline is already linked against curses or tinfo.
-        if do_readline and find_executable('ldd'):
-            fp = os.popen("ldd %s" % do_readline)
-            ldd_output = fp.readlines()
-            ret = fp.close()
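-            # fp.close() returns None if the command exited cleanly, else the
-            # raw wait status; the exit code sits in the high byte (hence >> 8)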
-            if ret is None or ret >> 8 == 0:
-                for ln in ldd_output:
-                    if 'curses' in ln:
-                        readline_termcap_library = re.sub(
-                            r'.*lib(n?cursesw?)\.so.*', r'\1', ln
-                        ).rstrip()
-                        break
-                    if 'tinfo' in ln: # termcap interface split out from ncurses
-                        readline_termcap_library = 'tinfo'
-                        break
-        # Issue 7384: If readline is already linked against curses,
-        # use the same library for the readline and curses modules.
-        if 'curses' in readline_termcap_library:
-            curses_library = readline_termcap_library
-        elif self.compiler.find_library_file(lib_dirs, 'ncursesw'):
-            curses_library = 'ncursesw'
-        elif self.compiler.find_library_file(lib_dirs, 'ncurses'):
-            curses_library = 'ncurses'
-        elif self.compiler.find_library_file(lib_dirs, 'curses'):
-            curses_library = 'curses'
-
-        if platform == 'darwin':
-            os_release = int(os.uname()[2].split('.')[0])
-            dep_target = sysconfig.get_config_var('MACOSX_DEPLOYMENT_TARGET')
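-            # note: this compares version components as strings, which is
-            # fine for the '10.x' deployment targets checked here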
-            if dep_target and dep_target.split('.') < ['10', '5']:
-                os_release = 8
-            if os_release < 9:
-                # MacOSX 10.4 has a broken readline. Don't try to build
-                # the readline module unless the user has installed a fixed
-                # readline package
-                if find_file('readline/rlconf.h', inc_dirs, []) is None:
-                    do_readline = False
-        if do_readline:
-            if platform == 'darwin' and os_release < 9:
-                # In every directory on the search path search for a dynamic
-                # library and then a static library, instead of first looking
-                # for dynamic libraries on the entire path.
-                # This way a statically linked custom readline gets picked up
-                # before the (possibly broken) dynamic library in /usr/lib.
-                readline_extra_link_args = ('-Wl,-search_paths_first',)
-            else:
-                readline_extra_link_args = ()
-
-            readline_libs = ['readline']
-            if readline_termcap_library:
-                pass # Issue 7384: Already linked against curses or tinfo.
-            elif curses_library:
-                readline_libs.append(curses_library)
-            elif self.compiler.find_library_file(lib_dirs +
-                                                     ['/usr/lib/termcap'],
-                                                     'termcap'):
-                readline_libs.append('termcap')
-            exts.append( Extension('readline', ['readline.c'],
-                                   library_dirs=['/usr/lib/termcap'],
-                                   extra_link_args=readline_extra_link_args,
-                                   libraries=readline_libs) )
-        else:
-            missing.append('readline')
-
-        # crypt module.
-
-        if self.compiler.find_library_file(lib_dirs, 'crypt'):
-            libs = ['crypt']
-        else:
-            libs = []
-        exts.append( Extension('crypt', ['cryptmodule.c'], libraries=libs) )
-
-        # CSV files
-        exts.append( Extension('_csv', ['_csv.c']) )
-
-        # socket(2)
-        exts.append( Extension('_socket', ['socketmodule.c'],
-                               depends = ['socketmodule.h']) )
-        # Detect SSL support for the socket module (via _ssl)
-        search_for_ssl_incs_in = [
-                              '/usr/local/ssl/include',
-                              '/usr/contrib/ssl/include/'
-                             ]
-        ssl_incs = find_file('openssl/ssl.h', inc_dirs,
-                             search_for_ssl_incs_in
-                             )
-        if ssl_incs is not None:
-            krb5_h = find_file('krb5.h', inc_dirs,
-                               ['/usr/kerberos/include'])
-            if krb5_h:
-                ssl_incs += krb5_h
-        ssl_libs = find_library_file(self.compiler, 'ssl',lib_dirs,
-                                     ['/usr/local/ssl/lib',
-                                      '/usr/contrib/ssl/lib/'
-                                     ] )
-
-        if (ssl_incs is not None and
-            ssl_libs is not None):
-            exts.append( Extension('_ssl', ['_ssl.c'],
-                                   include_dirs = ssl_incs,
-                                   library_dirs = ssl_libs,
-                                   libraries = ['ssl', 'crypto'],
-                                   depends = ['socketmodule.h']) )
-        else:
-            missing.append('_ssl')
-
-        # find out which version of OpenSSL we have
-        openssl_ver = 0
-        openssl_ver_re = re.compile(
-            r'^\s*#\s*define\s+OPENSSL_VERSION_NUMBER\s+(0x[0-9a-fA-F]+)' )
-
-        # look for the openssl version header on the compiler search path.
-        opensslv_h = find_file('openssl/opensslv.h', [],
-                inc_dirs + search_for_ssl_incs_in)
-        if opensslv_h:
-            name = os.path.join(opensslv_h[0], 'openssl/opensslv.h')
-            if sys.platform == 'darwin' and is_macosx_sdk_path(name):
-                name = os.path.join(macosx_sdk_root(), name[1:])
-            try:
-                incfile = open(name, 'r')
-                for line in incfile:
-                    m = openssl_ver_re.match(line)
-                    if m:
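-                        # the matched group is a hex literal such as
-                        # 0x0090812f; eval() turns it into an int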
-                        openssl_ver = eval(m.group(1))
-            except IOError, msg:
-                print "IOError while reading opensslv.h:", msg
-
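-        # OPENSSL_VERSION_NUMBER packs the version as 0xMNNFFPPS hex digits,
-        # so 0x00907000 corresponds to OpenSSL 0.9.7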
-        min_openssl_ver = 0x00907000
-        have_any_openssl = ssl_incs is not None and ssl_libs is not None
-        have_usable_openssl = (have_any_openssl and
-                               openssl_ver >= min_openssl_ver)
-
-        if have_any_openssl:
-            if have_usable_openssl:
-                # The _hashlib module wraps optimized implementations
-                # of hash functions from the OpenSSL library.
-                exts.append( Extension('_hashlib', ['_hashopenssl.c'],
-                                       include_dirs = ssl_incs,
-                                       library_dirs = ssl_libs,
-                                       libraries = ['ssl', 'crypto']) )
-            else:
-                print ("warning: openssl 0x%08x is too old for _hashlib" %
-                       openssl_ver)
-                missing.append('_hashlib')
-        if COMPILED_WITH_PYDEBUG or not have_usable_openssl:
-            # The _sha module implements the SHA1 hash algorithm.
-            exts.append( Extension('_sha', ['shamodule.c']) )
-            # The _md5 module implements the RSA Data Security, Inc. MD5
-            # Message-Digest Algorithm, described in RFC 1321.  The
-            # necessary files md5.c and md5.h are included here.
-            exts.append( Extension('_md5',
-                            sources = ['md5module.c', 'md5.c'],
-                            depends = ['md5.h']) )
-
-        min_sha2_openssl_ver = 0x00908000
-        if COMPILED_WITH_PYDEBUG or openssl_ver < min_sha2_openssl_ver:
-            # OpenSSL doesn't do these until 0.9.8 so we'll bring our own hash
-            exts.append( Extension('_sha256', ['sha256module.c']) )
-            exts.append( Extension('_sha512', ['sha512module.c']) )
-
-        # Modules that provide persistent dictionary-like semantics.  You will
-        # probably want to arrange for at least one of them to be available on
-        # your machine, though none are defined by default because of library
-        # dependencies.  The Python module anydbm.py provides an
-        # implementation independent wrapper for these; dumbdbm.py provides
-        # similar functionality (but slower of course) implemented in Python.
-
-        # Sleepycat^WOracle Berkeley DB interface.
-        #  http://www.oracle.com/database/berkeley-db/db/index.html
-        #
-        # This requires the Sleepycat^WOracle DB code. The supported versions
-        # are set below.  Visit the URL above to download
-        # a release.  Most open source OSes come with one or more
-        # versions of BerkeleyDB already installed.
-
-        max_db_ver = (4, 8)
-        min_db_ver = (4, 1)
-        db_setup_debug = False   # verbose debug prints from this script?
-
-        def allow_db_ver(db_ver):
-            """Returns a boolean if the given BerkeleyDB version is acceptable.
-
-            Args:
-              db_ver: A tuple of the version to verify.
-            """
-            if not (min_db_ver <= db_ver <= max_db_ver):
-                return False
-            # Use this function to filter out known bad configurations.
-            if (4, 6) == db_ver[:2]:
-                # BerkeleyDB 4.6.x is not stable on many architectures.
-                arch = platform_machine()
-                if arch not in ('i386', 'i486', 'i586', 'i686',
-                                'x86_64', 'ia64'):
-                    return False
-            return True
-
-        def gen_db_minor_ver_nums(major):
-            if major == 4:
-                for x in range(max_db_ver[1]+1):
-                    if allow_db_ver((4, x)):
-                        yield x
-            elif major == 3:
-                for x in (3,):
-                    if allow_db_ver((3, x)):
-                        yield x
-            else:
-                raise ValueError("unknown major BerkeleyDB version", major)
-
-        # construct a list of paths to look for the header file in on
-        # top of the normal inc_dirs.
-        db_inc_paths = [
-            '/usr/include/db4',
-            '/usr/local/include/db4',
-            '/opt/sfw/include/db4',
-            '/usr/include/db3',
-            '/usr/local/include/db3',
-            '/opt/sfw/include/db3',
-            # Fink defaults (http://fink.sourceforge.net/)
-            '/sw/include/db4',
-            '/sw/include/db3',
-        ]
-        # 4.x minor number specific paths
-        for x in gen_db_minor_ver_nums(4):
-            db_inc_paths.append('/usr/include/db4%d' % x)
-            db_inc_paths.append('/usr/include/db4.%d' % x)
-            db_inc_paths.append('/usr/local/BerkeleyDB.4.%d/include' % x)
-            db_inc_paths.append('/usr/local/include/db4%d' % x)
-            db_inc_paths.append('/pkg/db-4.%d/include' % x)
-            db_inc_paths.append('/opt/db-4.%d/include' % x)
-            # MacPorts default (http://www.macports.org/)
-            db_inc_paths.append('/opt/local/include/db4%d' % x)
-        # 3.x minor number specific paths
-        for x in gen_db_minor_ver_nums(3):
-            db_inc_paths.append('/usr/include/db3%d' % x)
-            db_inc_paths.append('/usr/local/BerkeleyDB.3.%d/include' % x)
-            db_inc_paths.append('/usr/local/include/db3%d' % x)
-            db_inc_paths.append('/pkg/db-3.%d/include' % x)
-            db_inc_paths.append('/opt/db-3.%d/include' % x)
-
-        # Add some common subdirectories for Sleepycat DB to the list,
-        # based on the standard include directories. This way DB3/4 gets
-        # picked up when it is installed in a non-standard prefix and
-        # the user has added that prefix into inc_dirs.
-        std_variants = []
-        for dn in inc_dirs:
-            std_variants.append(os.path.join(dn, 'db3'))
-            std_variants.append(os.path.join(dn, 'db4'))
-            for x in gen_db_minor_ver_nums(4):
-                std_variants.append(os.path.join(dn, "db4%d"%x))
-                std_variants.append(os.path.join(dn, "db4.%d"%x))
-            for x in gen_db_minor_ver_nums(3):
-                std_variants.append(os.path.join(dn, "db3%d"%x))
-                std_variants.append(os.path.join(dn, "db3.%d"%x))
-
-        db_inc_paths = std_variants + db_inc_paths
-        db_inc_paths = [p for p in db_inc_paths if os.path.exists(p)]
-
-        db_ver_inc_map = {}
-
-        if sys.platform == 'darwin':
-            sysroot = macosx_sdk_root()
-
-        class db_found(Exception): pass
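-        # db_found is raised below to break out of the nested search loops
-        # as soon as a usable library file has been located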
-        try:
-            # See whether there is a Sleepycat header in the standard
-            # search path.
-            for d in inc_dirs + db_inc_paths:
-                f = os.path.join(d, "db.h")
-
-                if sys.platform == 'darwin' and is_macosx_sdk_path(d):
-                    f = os.path.join(sysroot, d[1:], "db.h")
-
-                if db_setup_debug: print "db: looking for db.h in", f
-                if os.path.exists(f):
-                    f = open(f).read()
-                    m = re.search(r"#define\WDB_VERSION_MAJOR\W(\d+)", f)
-                    if m:
-                        db_major = int(m.group(1))
-                        m = re.search(r"#define\WDB_VERSION_MINOR\W(\d+)", f)
-                        db_minor = int(m.group(1))
-                        db_ver = (db_major, db_minor)
-
-                        # Avoid 4.6 prior to 4.6.21 due to a BerkeleyDB bug
-                        if db_ver == (4, 6):
-                            m = re.search(r"#define\WDB_VERSION_PATCH\W(\d+)", f)
-                            db_patch = int(m.group(1))
-                            if db_patch < 21:
-                                print "db.h:", db_ver, "patch", db_patch,
-                                print "being ignored (4.6.x must be >= 4.6.21)"
-                                continue
-
-                        if ( (db_ver not in db_ver_inc_map) and
-                            allow_db_ver(db_ver) ):
-                            # save the include directory with the db.h version
-                            # (first occurrence only)
-                            db_ver_inc_map[db_ver] = d
-                            if db_setup_debug:
-                                print "db.h: found", db_ver, "in", d
-                        else:
-                            # we already found a header for this library version
-                            if db_setup_debug: print "db.h: ignoring", d
-                    else:
-                        # ignore this header, it didn't contain a version number
-                        if db_setup_debug:
-                            print "db.h: no version number version in", d
-
-            db_found_vers = db_ver_inc_map.keys()
-            db_found_vers.sort()
-
-            while db_found_vers:
-                db_ver = db_found_vers.pop()
-                db_incdir = db_ver_inc_map[db_ver]
-
-                # check lib directories parallel to the location of the header
-                db_dirs_to_check = [
-                    db_incdir.replace("include", 'lib64'),
-                    db_incdir.replace("include", 'lib'),
-                    db_incdir.replace("include", 'lib/x86_64-linux-gnu')
-                ]
-
-                if sys.platform != 'darwin':
-                    db_dirs_to_check = filter(os.path.isdir, db_dirs_to_check)
-
-                else:
-                    # Same as other branch, but takes OSX SDK into account
-                    tmp = []
-                    for dn in db_dirs_to_check:
-                        if is_macosx_sdk_path(dn):
-                            if os.path.isdir(os.path.join(sysroot, dn[1:])):
-                                tmp.append(dn)
-                        else:
-                            if os.path.isdir(dn):
-                                tmp.append(dn)
-                    db_dirs_to_check = tmp
-
-                # Look for a version specific db-X.Y before an ambiguous dbX
-                # XXX should we -ever- look for a dbX name?  Do any
-                # systems really not name their library by version and
-                # symlink to more general names?
-                for dblib in (('db-%d.%d' % db_ver),
-                              ('db%d%d' % db_ver),
-                              ('db%d' % db_ver[0])):
-                    dblib_file = self.compiler.find_library_file(
-                                    db_dirs_to_check + lib_dirs, dblib )
-                    if dblib_file:
-                        dblib_dir = [ os.path.abspath(os.path.dirname(dblib_file)) ]
-                        raise db_found
-                    else:
-                        if db_setup_debug: print "db lib: ", dblib, "not found"
-
-        except db_found:
-            if db_setup_debug:
-                print "bsddb using BerkeleyDB lib:", db_ver, dblib
-                print "bsddb lib dir:", dblib_dir, " inc dir:", db_incdir
-            db_incs = [db_incdir]
-            dblibs = [dblib]
-            # We add the runtime_library_dirs argument because the
-            # BerkeleyDB lib we're linking against often isn't in the
-            # system dynamic library search path.  This is usually
-            # correct and most trouble free, but may cause problems in
-            # some unusual system configurations (e.g. the directory
-            # is on an NFS server that goes away).
-            exts.append(Extension('_bsddb', ['_bsddb.c'],
-                                  depends = ['bsddb.h'],
-                                  library_dirs=dblib_dir,
-                                  runtime_library_dirs=dblib_dir,
-                                  include_dirs=db_incs,
-                                  libraries=dblibs))
-        else:
-            if db_setup_debug: print "db: no appropriate library found"
-            db_incs = None
-            dblibs = []
-            dblib_dir = None
-            missing.append('_bsddb')
-
-        # The sqlite interface
-        sqlite_setup_debug = False   # verbose debug prints from this script?
-
-        # We hunt for #define SQLITE_VERSION "n.n.n"
-        # We need to find >= sqlite version 3.0.8
-        sqlite_incdir = sqlite_libdir = None
-        sqlite_inc_paths = [ '/usr/include',
-                             '/usr/include/sqlite',
-                             '/usr/include/sqlite3',
-                             '/usr/local/include',
-                             '/usr/local/include/sqlite',
-                             '/usr/local/include/sqlite3',
-                           ]
-        MIN_SQLITE_VERSION_NUMBER = (3, 0, 8)
-        MIN_SQLITE_VERSION = ".".join([str(x)
-                                    for x in MIN_SQLITE_VERSION_NUMBER])
-
-        # Scan the default include directories before the SQLite specific
-        # ones. This allows one to override the copy of sqlite on OSX,
-        # where /usr/include contains an old version of sqlite.
-        if sys.platform == 'darwin':
-            sysroot = macosx_sdk_root()
-
-        for d in inc_dirs + sqlite_inc_paths:
-            f = os.path.join(d, "sqlite3.h")
-
-            if sys.platform == 'darwin' and is_macosx_sdk_path(d):
-                f = os.path.join(sysroot, d[1:], "sqlite3.h")
-
-            if os.path.exists(f):
-                if sqlite_setup_debug: print "sqlite: found %s"%f
-                incf = open(f).read()
-                m = re.search(
-                    r'\s*.*#\s*.*define\s.*SQLITE_VERSION\W*"(.*)"', incf)
-                if m:
-                    sqlite_version = m.group(1)
-                    sqlite_version_tuple = tuple([int(x)
-                                        for x in sqlite_version.split(".")])
-                    if sqlite_version_tuple >= MIN_SQLITE_VERSION_NUMBER:
-                        # we win!
-                        if sqlite_setup_debug:
-                            print "%s/sqlite3.h: version %s"%(d, sqlite_version)
-                        sqlite_incdir = d
-                        break
-                    else:
-                        if sqlite_setup_debug:
-                            print "%s: version %d is too old, need >= %s"%(d,
-                                        sqlite_version, MIN_SQLITE_VERSION)
-                elif sqlite_setup_debug:
-                    print "sqlite: %s had no SQLITE_VERSION"%(f,)
-
-        if sqlite_incdir:
-            sqlite_dirs_to_check = [
-                os.path.join(sqlite_incdir, '..', 'lib64'),
-                os.path.join(sqlite_incdir, '..', 'lib'),
-                os.path.join(sqlite_incdir, '..', 'lib/x86_64-linux-gnu'),
-                os.path.join(sqlite_incdir, '..', '..', 'lib64'),
-                os.path.join(sqlite_incdir, '..', '..', 'lib'),
-            ]
-            sqlite_libfile = self.compiler.find_library_file(
-                                sqlite_dirs_to_check + lib_dirs, 'sqlite3')
-            if sqlite_libfile:
-                sqlite_libdir = [os.path.abspath(os.path.dirname(sqlite_libfile))]
-
-        if sqlite_incdir and sqlite_libdir:
-            sqlite_srcs = ['_sqlite/cache.c',
-                '_sqlite/connection.c',
-                '_sqlite/cursor.c',
-                '_sqlite/microprotocols.c',
-                '_sqlite/module.c',
-                '_sqlite/prepare_protocol.c',
-                '_sqlite/row.c',
-                '_sqlite/statement.c',
-                '_sqlite/util.c', ]
-
-            sqlite_defines = []
-            if sys.platform != "win32":
-                sqlite_defines.append(('MODULE_NAME', '"sqlite3"'))
-            else:
-                sqlite_defines.append(('MODULE_NAME', '\\"sqlite3\\"'))
-
-            # Comment this out if you want the sqlite3 module to be able to load extensions.
-            sqlite_defines.append(("SQLITE_OMIT_LOAD_EXTENSION", "1"))
-
-            if sys.platform == 'darwin':
-                # In every directory on the search path search for a dynamic
-                # library and then a static library, instead of first looking
-                # for dynamic libraries on the entire path.
-                # This way a statically linked custom sqlite gets picked up
-                # before the dynamic library in /usr/lib.
-                sqlite_extra_link_args = ('-Wl,-search_paths_first',)
-            else:
-                sqlite_extra_link_args = ()
-
-            exts.append(Extension('_sqlite3', sqlite_srcs,
-                                  define_macros=sqlite_defines,
-                                  include_dirs=["Modules/_sqlite",
-                                                sqlite_incdir],
-                                  library_dirs=sqlite_libdir,
-                                  runtime_library_dirs=sqlite_libdir,
-                                  extra_link_args=sqlite_extra_link_args,
-                                  libraries=["sqlite3",]))
-        else:
-            missing.append('_sqlite3')
-
-        # Look for Berkeley db 1.85.   Note that it is built as a different
-        # module name so it can be included even when later versions are
-        # available.  A very restrictive search is performed to avoid
-        # accidentally building this module with a later version of the
-        # underlying db library.  Many BSD-ish Unixes incorporate db 1.85
-        # symbols into libc and place the include file in /usr/include.
-        #
-        # If the better bsddb library can be built (db_incs is defined)
-        # we do not build this one.  Otherwise this build will pick up
-        # the more recent berkeleydb's db.h file first in the include path
-        # when attempting to compile and it will fail.
-        f = "/usr/include/db.h"
-
-        if sys.platform == 'darwin':
-            if is_macosx_sdk_path(f):
-                sysroot = macosx_sdk_root()
-                f = os.path.join(sysroot, f[1:])
-
-        if os.path.exists(f) and not db_incs:
-            data = open(f).read()
-            m = re.search(r"#s*define\s+HASHVERSION\s+2\s*", data)
-            if m is not None:
-                # bingo - old version used hash file format version 2
-                ### XXX this should be fixed to not be platform-dependent
-                ### but I don't have direct access to an osf1 platform and
-                ### seemed to be muffing the search somehow
-                libraries = platform == "osf1" and ['db'] or None
-                if libraries is not None:
-                    exts.append(Extension('bsddb185', ['bsddbmodule.c'],
-                                          libraries=libraries))
-                else:
-                    exts.append(Extension('bsddb185', ['bsddbmodule.c']))
-            else:
-                missing.append('bsddb185')
-        else:
-            missing.append('bsddb185')
-
-        dbm_order = ['gdbm']
-        # The standard Unix dbm module:
-        if platform not in ['cygwin']:
-            config_args = [arg.strip("'")
-                           for arg in sysconfig.get_config_var("CONFIG_ARGS").split()]
-            dbm_args = [arg for arg in config_args
-                        if arg.startswith('--with-dbmliborder=')]
-            if dbm_args:
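-                # --with-dbmliborder takes a colon-separated list, e.g.
-                # --with-dbmliborder=gdbm:ndbm; the last occurrence wins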
-                dbm_order = [arg.split('=')[-1] for arg in dbm_args][-1].split(":")
-            else:
-                dbm_order = "ndbm:gdbm:bdb".split(":")
-            dbmext = None
-            for cand in dbm_order:
-                if cand == "ndbm":
-                    if find_file("ndbm.h", inc_dirs, []) is not None:
-                        # Some systems have -lndbm, others don't
-                        if self.compiler.find_library_file(lib_dirs,
-                                                               'ndbm'):
-                            ndbm_libs = ['ndbm']
-                        else:
-                            ndbm_libs = []
-                        print "building dbm using ndbm"
-                        dbmext = Extension('dbm', ['dbmmodule.c'],
-                                           define_macros=[
-                                               ('HAVE_NDBM_H',None),
-                                               ],
-                                           libraries=ndbm_libs)
-                        break
-
-                elif cand == "gdbm":
-                    if self.compiler.find_library_file(lib_dirs, 'gdbm'):
-                        gdbm_libs = ['gdbm']
-                        if self.compiler.find_library_file(lib_dirs,
-                                                               'gdbm_compat'):
-                            gdbm_libs.append('gdbm_compat')
-                        if find_file("gdbm/ndbm.h", inc_dirs, []) is not None:
-                            print "building dbm using gdbm"
-                            dbmext = Extension(
-                                'dbm', ['dbmmodule.c'],
-                                define_macros=[
-                                    ('HAVE_GDBM_NDBM_H', None),
-                                    ],
-                                libraries = gdbm_libs)
-                            break
-                        if find_file("gdbm-ndbm.h", inc_dirs, []) is not None:
-                            print "building dbm using gdbm"
-                            dbmext = Extension(
-                                'dbm', ['dbmmodule.c'],
-                                define_macros=[
-                                    ('HAVE_GDBM_DASH_NDBM_H', None),
-                                    ],
-                                libraries = gdbm_libs)
-                            break
-                elif cand == "bdb":
-                    if db_incs is not None:
-                        print "building dbm using bdb"
-                        dbmext = Extension('dbm', ['dbmmodule.c'],
-                                           library_dirs=dblib_dir,
-                                           runtime_library_dirs=dblib_dir,
-                                           include_dirs=db_incs,
-                                           define_macros=[
-                                               ('HAVE_BERKDB_H', None),
-                                               ('DB_DBM_HSEARCH', None),
-                                               ],
-                                           libraries=dblibs)
-                        break
-            if dbmext is not None:
-                exts.append(dbmext)
-            else:
-                missing.append('dbm')
-
-        # Anthony Baxter's gdbm module.  GNU dbm(3) will require -lgdbm:
-        if ('gdbm' in dbm_order and
-            self.compiler.find_library_file(lib_dirs, 'gdbm')):
-            exts.append( Extension('gdbm', ['gdbmmodule.c'],
-                                   libraries = ['gdbm'] ) )
-        else:
-            missing.append('gdbm')
-
-        # Unix-only modules
-        if platform not in ['win32']:
-            # Steen Lumholt's termios module
-            exts.append( Extension('termios', ['termios.c']) )
-            # Jeremy Hylton's rlimit interface
-            if platform not in ['atheos']:
-                exts.append( Extension('resource', ['resource.c']) )
-            else:
-                missing.append('resource')
-
-            # Sun yellow pages. Some systems have the functions in libc.
-            if (platform not in ['cygwin', 'atheos', 'qnx6'] and
-                find_file('rpcsvc/yp_prot.h', inc_dirs, []) is not None):
-                if (self.compiler.find_library_file(lib_dirs, 'nsl')):
-                    libs = ['nsl']
-                else:
-                    libs = []
-                exts.append( Extension('nis', ['nismodule.c'],
-                                       libraries = libs) )
-            else:
-                missing.append('nis')
-        else:
-            missing.extend(['nis', 'resource', 'termios'])
-
-        # Curses support, requiring the System V version of curses, often
-        # provided by the ncurses library.
-        panel_library = 'panel'
-        if curses_library.startswith('ncurses'):
-            if curses_library == 'ncursesw':
-                # Bug 1464056: If _curses.so links with ncursesw,
-                # _curses_panel.so must link with panelw.
-                panel_library = 'panelw'
-            curses_libs = [curses_library]
-            exts.append( Extension('_curses', ['_cursesmodule.c'],
-                                   libraries = curses_libs) )
-        elif curses_library == 'curses' and platform != 'darwin':
-            # OSX has an old Berkeley curses, not good enough for
-            # the _curses module.
-            if (self.compiler.find_library_file(lib_dirs, 'terminfo')):
-                curses_libs = ['curses', 'terminfo']
-            elif (self.compiler.find_library_file(lib_dirs, 'termcap')):
-                curses_libs = ['curses', 'termcap']
-            else:
-                curses_libs = ['curses']
-
-            exts.append( Extension('_curses', ['_cursesmodule.c'],
-                                   libraries = curses_libs) )
-        else:
-            missing.append('_curses')
-
-        # If the curses module is enabled, check for the panel module
-        if (module_enabled(exts, '_curses') and
-            self.compiler.find_library_file(lib_dirs, panel_library)):
-            exts.append( Extension('_curses_panel', ['_curses_panel.c'],
-                                   libraries = [panel_library] + curses_libs) )
-        else:
-            missing.append('_curses_panel')
-
-        # Andrew Kuchling's zlib module.  Note that some versions of zlib
-        # 1.1.3 have security problems.  See CERT Advisory CA-2002-07:
-        # http://www.cert.org/advisories/CA-2002-07.html
-        #
-        # zlib 1.1.4 is fixed, but at least one vendor (RedHat) has decided to
-        # patch its zlib 1.1.3 package instead of upgrading to 1.1.4.  For
-        # now, we still accept 1.1.3, because we think it's difficult to
-        # exploit this in Python, and we'd rather make it RedHat's problem
-        # than our problem <wink>.
-        #
-        # You can upgrade zlib to version 1.1.4 yourself by going to
-        # http://www.gzip.org/zlib/
-        zlib_inc = find_file('zlib.h', [], inc_dirs)
-        have_zlib = False
-        if zlib_inc is not None:
-            zlib_h = zlib_inc[0] + '/zlib.h'
-            version = '"0.0.0"'
-            version_req = '"1.1.3"'
-            fp = open(zlib_h)
-            while 1:
-                line = fp.readline()
-                if not line:
-                    break
-                if line.startswith('#define ZLIB_VERSION'):
-                    version = line.split()[2]
-                    break
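-            # note: this compares the quoted version strings
-            # lexicographically, which is adequate for zlib's 1.x.y scheme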
-            if version >= version_req:
-                if (self.compiler.find_library_file(lib_dirs, 'z')):
-                    if sys.platform == "darwin":
-                        zlib_extra_link_args = ('-Wl,-search_paths_first',)
-                    else:
-                        zlib_extra_link_args = ()
-                    exts.append( Extension('zlib', ['zlibmodule.c'],
-                                           libraries = ['z'],
-                                           extra_link_args = zlib_extra_link_args))
-                    have_zlib = True
-                else:
-                    missing.append('zlib')
-            else:
-                missing.append('zlib')
-        else:
-            missing.append('zlib')
-
-        # Helper module for various ascii-encoders.  Uses zlib for an optimized
-        # crc32 if we have it.  Otherwise binascii uses its own.
-        if have_zlib:
-            extra_compile_args = ['-DUSE_ZLIB_CRC32']
-            libraries = ['z']
-            extra_link_args = zlib_extra_link_args
-        else:
-            extra_compile_args = []
-            libraries = []
-            extra_link_args = []
-        exts.append( Extension('binascii', ['binascii.c'],
-                               extra_compile_args = extra_compile_args,
-                               libraries = libraries,
-                               extra_link_args = extra_link_args) )
-
-        # Gustavo Niemeyer's bz2 module.
-        if (self.compiler.find_library_file(lib_dirs, 'bz2')):
-            if sys.platform == "darwin":
-                bz2_extra_link_args = ('-Wl,-search_paths_first',)
-            else:
-                bz2_extra_link_args = ()
-            exts.append( Extension('bz2', ['bz2module.c'],
-                                   libraries = ['bz2'],
-                                   extra_link_args = bz2_extra_link_args) )
-        else:
-            missing.append('bz2')
-
-        # Interface to the Expat XML parser
-        #
-        # Expat was written by James Clark and is now maintained by a group of
-        # developers on SourceForge; see www.libexpat.org for more information.
-        # The pyexpat module was written by Paul Prescod after a prototype by
-        # Jack Jansen.  The Expat source is included in Modules/expat/.  Usage
-        # of a system shared libexpat.so is possible with --with-system-expat
-        # configure option.
-        #
-        # More information on Expat can be found at www.libexpat.org.
-        #
-        if '--with-system-expat' in sysconfig.get_config_var("CONFIG_ARGS"):
-            expat_inc = []
-            define_macros = []
-            expat_lib = ['expat']
-            expat_sources = []
-        else:
-            expat_inc = [os.path.join(os.getcwd(), srcdir, 'Modules', 'expat')]
-            define_macros = [
-                ('HAVE_EXPAT_CONFIG_H', '1'),
-            ]
-            expat_lib = []
-            expat_sources = ['expat/xmlparse.c',
-                             'expat/xmlrole.c',
-                             'expat/xmltok.c']
-
-        exts.append(Extension('pyexpat',
-                              define_macros = define_macros,
-                              include_dirs = expat_inc,
-                              libraries = expat_lib,
-                              sources = ['pyexpat.c'] + expat_sources
-                              ))
-
-        # Fredrik Lundh's cElementTree module.  Note that this also
-        # uses expat (via the CAPI hook in pyexpat).
-
-        if os.path.isfile(os.path.join(srcdir, 'Modules', '_elementtree.c')):
-            define_macros.append(('USE_PYEXPAT_CAPI', None))
-            exts.append(Extension('_elementtree',
-                                  define_macros = define_macros,
-                                  include_dirs = expat_inc,
-                                  libraries = expat_lib,
-                                  sources = ['_elementtree.c'],
-                                  ))
-        else:
-            missing.append('_elementtree')
-
-        # Hye-Shik Chang's CJKCodecs modules.
-        if have_unicode:
-            exts.append(Extension('_multibytecodec',
-                                  ['cjkcodecs/multibytecodec.c']))
-            for loc in ('kr', 'jp', 'cn', 'tw', 'hk', 'iso2022'):
-                exts.append(Extension('_codecs_%s' % loc,
-                                      ['cjkcodecs/_codecs_%s.c' % loc]))
-        else:
-            missing.append('_multibytecodec')
-            for loc in ('kr', 'jp', 'cn', 'tw', 'hk', 'iso2022'):
-                missing.append('_codecs_%s' % loc)
-
-        # Dynamic loading module
-        if sys.maxint == 0x7fffffff:
-            # This requires sizeof(int) == sizeof(long) == sizeof(char*)
-            dl_inc = find_file('dlfcn.h', [], inc_dirs)
-            if (dl_inc is not None) and (platform not in ['atheos']):
-                exts.append( Extension('dl', ['dlmodule.c']) )
-            else:
-                missing.append('dl')
-        else:
-            missing.append('dl')
-
-        # Thomas Heller's _ctypes module
-        self.detect_ctypes(inc_dirs, lib_dirs)
-
-        # Richard Oudkerk's multiprocessing module
-        if platform == 'win32':             # Windows
-            macros = dict()
-            libraries = ['ws2_32']
-
-        elif platform == 'darwin':          # Mac OSX
-            macros = dict()
-            libraries = []
-
-        elif platform == 'cygwin':          # Cygwin
-            macros = dict()
-            libraries = []
-
-        elif platform in ('freebsd4', 'freebsd5', 'freebsd6', 'freebsd7', 'freebsd8'):
-            # FreeBSD's P1003.1b semaphore support is very experimental
-            # and has many known problems. (as of June 2008)
-            macros = dict()
-            libraries = []
-
-        elif platform.startswith('openbsd'):
-            macros = dict()
-            libraries = []
-
-        elif platform.startswith('netbsd'):
-            macros = dict()
-            libraries = []
-
-        else:                                   # Linux and other unices
-            macros = dict()
-            libraries = ['rt']
-
-        if platform == 'win32':
-            multiprocessing_srcs = [ '_multiprocessing/multiprocessing.c',
-                                     '_multiprocessing/semaphore.c',
-                                     '_multiprocessing/pipe_connection.c',
-                                     '_multiprocessing/socket_connection.c',
-                                     '_multiprocessing/win32_functions.c'
-                                   ]
-
-        else:
-            multiprocessing_srcs = [ '_multiprocessing/multiprocessing.c',
-                                     '_multiprocessing/socket_connection.c'
-                                   ]
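-            # only compile the POSIX semaphore support when sem_open() is
-            # actually usable on this platform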
-            if (sysconfig.get_config_var('HAVE_SEM_OPEN') and not
-                sysconfig.get_config_var('POSIX_SEMAPHORES_NOT_ENABLED')):
-                multiprocessing_srcs.append('_multiprocessing/semaphore.c')
-
-        if sysconfig.get_config_var('WITH_THREAD'):
-            exts.append ( Extension('_multiprocessing', multiprocessing_srcs,
-                                    define_macros=macros.items(),
-                                    include_dirs=["Modules/_multiprocessing"]))
-        else:
-            missing.append('_multiprocessing')
-
-        # End multiprocessing
-
-
-        # Platform-specific libraries
-        if platform == 'linux2':
-            # Linux-specific modules
-            exts.append( Extension('linuxaudiodev', ['linuxaudiodev.c']) )
-        else:
-            missing.append('linuxaudiodev')
-
-        if (platform in ('linux2', 'freebsd4', 'freebsd5', 'freebsd6',
-                        'freebsd7', 'freebsd8')
-            or platform.startswith("gnukfreebsd")):
-            exts.append( Extension('ossaudiodev', ['ossaudiodev.c']) )
-        else:
-            missing.append('ossaudiodev')
-
-        if platform == 'sunos5':
-            # SunOS specific modules
-            exts.append( Extension('sunaudiodev', ['sunaudiodev.c']) )
-        else:
-            missing.append('sunaudiodev')
-
-        if platform == 'darwin':
-            # _scproxy
-            exts.append(Extension("_scproxy", [os.path.join(srcdir, "Mac/Modules/_scproxy.c")],
-                extra_link_args= [
-                    '-framework', 'SystemConfiguration',
-                    '-framework', 'CoreFoundation'
-                ]))
-
-
-        if platform == 'darwin' and ("--disable-toolbox-glue" not in
-                sysconfig.get_config_var("CONFIG_ARGS")):
-
-            if int(os.uname()[2].split('.')[0]) >= 8:
-                # We're on Mac OS X 10.4 or later, the compiler should
-                # support '-Wno-deprecated-declarations'. This will
-                # suppress deprecation warnings for the Carbon extensions,
-                # these extensions wrap the Carbon APIs and even those
-                # parts that are deprecated.
-                carbon_extra_compile_args = ['-Wno-deprecated-declarations']
-            else:
-                carbon_extra_compile_args = []
-
-            # Mac OS X specific modules.
-            def macSrcExists(name1, name2=''):
-                if not name1:
-                    return None
-                names = (name1,)
-                if name2:
-                    names = (name1, name2)
-                path = os.path.join(srcdir, 'Mac', 'Modules', *names)
-                return os.path.exists(path)
-
-            def addMacExtension(name, kwds, extra_srcs=[]):
-                dirname = ''
-                if name[0] == '_':
-                    dirname = name[1:].lower()
-                cname = name + '.c'
-                cmodulename = name + 'module.c'
-                # Check for NNN.c, NNNmodule.c, _nnn/NNN.c, _nnn/NNNmodule.c
-                if macSrcExists(cname):
-                    srcs = [cname]
-                elif macSrcExists(cmodulename):
-                    srcs = [cmodulename]
-                elif macSrcExists(dirname, cname):
-                    # XXX(nnorwitz): If all the names ended with module, we
-                    # wouldn't need this condition.  ibcarbon is the only one.
-                    srcs = [os.path.join(dirname, cname)]
-                elif macSrcExists(dirname, cmodulename):
-                    srcs = [os.path.join(dirname, cmodulename)]
-                else:
-                    raise RuntimeError("%s not found" % name)
-
-                # Here's the whole point:  add the extension with sources
-                exts.append(Extension(name, srcs + extra_srcs, **kwds))
-
-            # Core Foundation
-            core_kwds = {'extra_compile_args': carbon_extra_compile_args,
-                         'extra_link_args': ['-framework', 'CoreFoundation'],
-                        }
-            addMacExtension('_CF', core_kwds, ['cf/pycfbridge.c'])
-            addMacExtension('autoGIL', core_kwds)
-
-
-
-            # Carbon
-            carbon_kwds = {'extra_compile_args': carbon_extra_compile_args,
-                           'extra_link_args': ['-framework', 'Carbon'],
-                          }
-            CARBON_EXTS = ['ColorPicker', 'gestalt', 'MacOS', 'Nav',
-                           'OSATerminology', 'icglue',
-                           # All these are in subdirs
-                           '_AE', '_AH', '_App', '_CarbonEvt', '_Cm', '_Ctl',
-                           '_Dlg', '_Drag', '_Evt', '_File', '_Folder', '_Fm',
-                           '_Help', '_Icn', '_IBCarbon', '_List',
-                           '_Menu', '_Mlte', '_OSA', '_Res', '_Qd', '_Qdoffs',
-                           '_Scrap', '_Snd', '_TE',
-                          ]
-            for name in CARBON_EXTS:
-                addMacExtension(name, carbon_kwds)
-
-            # Workaround for a bug in the version of gcc shipped with Xcode 3.
-            # The _Win extension should build just like the other Carbon extensions, but
-            # this actually results in a hard crash of the linker.
-            #
-            if '-arch ppc64' in cflags and '-arch ppc' in cflags:
-                win_kwds = {'extra_compile_args': carbon_extra_compile_args + ['-arch', 'i386', '-arch', 'ppc'],
-                               'extra_link_args': ['-framework', 'Carbon', '-arch', 'i386', '-arch', 'ppc'],
-                           }
-                addMacExtension('_Win', win_kwds)
-            else:
-                addMacExtension('_Win', carbon_kwds)
-
-
-            # Application Services & QuickTime
-            app_kwds = {'extra_compile_args': carbon_extra_compile_args,
-                        'extra_link_args': ['-framework','ApplicationServices'],
-                       }
-            addMacExtension('_Launch', app_kwds)
-            addMacExtension('_CG', app_kwds)
-
-            exts.append( Extension('_Qt', ['qt/_Qtmodule.c'],
-                        extra_compile_args=carbon_extra_compile_args,
-                        extra_link_args=['-framework', 'QuickTime',
-                                     '-framework', 'Carbon']) )
-
-
-        self.extensions.extend(exts)
-
-        # Call the method for detecting whether _tkinter can be compiled
-        self.detect_tkinter(inc_dirs, lib_dirs)
-
-        if '_tkinter' not in [e.name for e in self.extensions]:
-            missing.append('_tkinter')
-
-        return missing
-
-    def detect_tkinter_darwin(self, inc_dirs, lib_dirs):
-        # The _tkinter module, using frameworks. Since frameworks are quite
-        # different the UNIX search logic is not sharable.
-        from os.path import join, exists
-        framework_dirs = [
-            '/Library/Frameworks',
-            '/System/Library/Frameworks/',
-            join(os.getenv('HOME'), 'Library/Frameworks')  # no leading '/': os.path.join would drop $HOME
-        ]
-
-        sysroot = macosx_sdk_root()
-
-        # Find the directory that contains the Tcl.framework and Tk.framework
-        # bundles.
-        # XXX distutils should support -F!
-        for F in framework_dirs:
-            # both Tcl.framework and Tk.framework should be present
-
-
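-            # note the nested for/else: the inner else breaks out of the
-            # outer loop only when both frameworks were found under F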
-            for fw in 'Tcl', 'Tk':
-                if is_macosx_sdk_path(F):
-                    if not exists(join(sysroot, F[1:], fw + '.framework')):
-                        break
-                else:
-                    if not exists(join(F, fw + '.framework')):
-                        break
-            else:
-                # ok, F is now a directory with both frameworks. Continue
-                # building.
-                break
-        else:
-            # Tk and Tcl frameworks not found. Normal "unix" tkinter search
-            # will now resume.
-            return 0
-
-        # For 8.4a2, we must add -I options that point inside the Tcl and Tk
-        # frameworks. In a later release we should hopefully be able to pass
-        # the -F option to gcc, which specifies a framework lookup path.
-        #
-        include_dirs = [
-            join(F, fw + '.framework', H)
-            for fw in 'Tcl', 'Tk'
-            for H in 'Headers', 'Versions/Current/PrivateHeaders'
-        ]
-
-        # For 8.4a2, the X11 headers are not included. Rather than include a
-        # complicated search, this is a hard-coded path. It could bail out
-        # if X11 libs are not found...
-        include_dirs.append('/usr/X11R6/include')
-        frameworks = ['-framework', 'Tcl', '-framework', 'Tk']
-
-        # Existing framework builds of Tcl/Tk do not support 64-bit
-        # architectures.
-        cflags = sysconfig.get_config_vars('CFLAGS')[0]
-        archs = re.findall(r'-arch\s+(\w+)', cflags)
-
-        if is_macosx_sdk_path(F):
-            fp = os.popen("file %s/Tk.framework/Tk | grep 'for architecture'"%(os.path.join(sysroot, F[1:]),))
-        else:
-            fp = os.popen("file %s/Tk.framework/Tk | grep 'for architecture'"%(F,))
-
-        detected_archs = []
-        for ln in fp:
-            a = ln.split()[-1]
-            if a in archs:
-                detected_archs.append(ln.split()[-1])
-        fp.close()
-
-        for a in detected_archs:
-            frameworks.append('-arch')
-            frameworks.append(a)
-
-        ext = Extension('_tkinter', ['_tkinter.c', 'tkappinit.c'],
-                        define_macros=[('WITH_APPINIT', 1)],
-                        include_dirs = include_dirs,
-                        libraries = [],
-                        extra_compile_args = frameworks[2:],
-                        extra_link_args = frameworks,
-                        )
-        self.extensions.append(ext)
-        return 1
-
-
-    def detect_tkinter(self, inc_dirs, lib_dirs):
-        # The _tkinter module.
-
-        # Rather than complicate the code below, detecting and building
-        # AquaTk is a separate method. Only one Tkinter will be built on
-        # Darwin - either AquaTk, if it is found, or X11 based Tk.
-        platform = self.get_platform()
-        if (platform == 'darwin' and
-            self.detect_tkinter_darwin(inc_dirs, lib_dirs)):
-            return
-
-        # Assume we haven't found any of the libraries or include files
-        # The versions with dots are used on Unix, and the versions without
-        # dots on Windows, for detection by cygwin.
-        tcllib = tklib = tcl_includes = tk_includes = None
-        for version in ['8.6', '86', '8.5', '85', '8.4', '84', '8.3', '83',
-                        '8.2', '82', '8.1', '81', '8.0', '80']:
-            tklib = self.compiler.find_library_file(lib_dirs,
-                                                        'tk' + version)
-            tcllib = self.compiler.find_library_file(lib_dirs,
-                                                         'tcl' + version)
-            if tklib and tcllib:
-                # Exit the loop when we've found the Tcl/Tk libraries
-                break
-
-        # Now check for the header files
-        if tklib and tcllib:
-            # Check for the include files on Debian and {Free,Open}BSD, where
-            # they're put in /usr/include/{tcl,tk}X.Y
-            dotversion = version
-            if '.' not in dotversion and "bsd" in sys.platform.lower():
-                # OpenBSD and FreeBSD use Tcl/Tk library names like libtcl83.a,
-                # but the include subdirs are named like .../include/tcl8.3.
-                dotversion = dotversion[:-1] + '.' + dotversion[-1]
-            tcl_include_sub = []
-            tk_include_sub = []
-            for dir in inc_dirs:
-                tcl_include_sub += [dir + os.sep + "tcl" + dotversion]
-                tk_include_sub += [dir + os.sep + "tk" + dotversion]
-            tk_include_sub += tcl_include_sub
-            tcl_includes = find_file('tcl.h', inc_dirs, tcl_include_sub)
-            tk_includes = find_file('tk.h', inc_dirs, tk_include_sub)
-
-        if (tcllib is None or tklib is None or
-            tcl_includes is None or tk_includes is None):
-            self.announce("INFO: Can't locate Tcl/Tk libs and/or headers", 2)
-            return
-
-        # OK... everything seems to be present for Tcl/Tk.
-
-        include_dirs = [] ; libs = [] ; defs = [] ; added_lib_dirs = []
-        for dir in tcl_includes + tk_includes:
-            if dir not in include_dirs:
-                include_dirs.append(dir)
-
-        # Check for various platform-specific directories
-        if platform == 'sunos5':
-            include_dirs.append('/usr/openwin/include')
-            added_lib_dirs.append('/usr/openwin/lib')
-        elif os.path.exists('/usr/X11R6/include'):
-            include_dirs.append('/usr/X11R6/include')
-            added_lib_dirs.append('/usr/X11R6/lib64')
-            added_lib_dirs.append('/usr/X11R6/lib')
-        elif os.path.exists('/usr/X11R5/include'):
-            include_dirs.append('/usr/X11R5/include')
-            added_lib_dirs.append('/usr/X11R5/lib')
-        else:
-            # Assume default location for X11
-            include_dirs.append('/usr/X11/include')
-            added_lib_dirs.append('/usr/X11/lib')
-
-        # If Cygwin, then verify that X is installed before proceeding
-        if platform == 'cygwin':
-            x11_inc = find_file('X11/Xlib.h', [], include_dirs)
-            if x11_inc is None:
-                return
-
-        # Check for BLT extension
-        if self.compiler.find_library_file(lib_dirs + added_lib_dirs,
-                                               'BLT8.0'):
-            defs.append( ('WITH_BLT', 1) )
-            libs.append('BLT8.0')
-        elif self.compiler.find_library_file(lib_dirs + added_lib_dirs,
-                                                'BLT'):
-            defs.append( ('WITH_BLT', 1) )
-            libs.append('BLT')
-
-        # Add the Tcl/Tk libraries
-        libs.append('tk'+ version)
-        libs.append('tcl'+ version)
-
-        if platform in ['aix3', 'aix4']:
-            libs.append('ld')
-
-        # Finally, link with the X11 libraries (not appropriate on cygwin)
-        if platform != "cygwin":
-            libs.append('X11')
-
-        ext = Extension('_tkinter', ['_tkinter.c', 'tkappinit.c'],
-                        define_macros=[('WITH_APPINIT', 1)] + defs,
-                        include_dirs = include_dirs,
-                        libraries = libs,
-                        library_dirs = added_lib_dirs,
-                        )
-        self.extensions.append(ext)
-
-##         # Uncomment these lines if you want to play with xxmodule.c
-##         ext = Extension('xx', ['xxmodule.c'])
-##         self.extensions.append(ext)
-
-        # XXX handle these, but how to detect?
-        # *** Uncomment and edit for PIL (TkImaging) extension only:
-        #       -DWITH_PIL -I../Extensions/Imaging/libImaging  tkImaging.c \
-        # *** Uncomment and edit for TOGL extension only:
-        #       -DWITH_TOGL togl.c \
-        # *** Uncomment these for TOGL extension only:
-        #       -lGL -lGLU -lXext -lXmu \
-
-    def configure_ctypes_darwin(self, ext):
-        # Darwin (OS X) uses preconfigured files, in
-        # the Modules/_ctypes/libffi_osx directory.
-        srcdir = sysconfig.get_config_var('srcdir')
-        ffi_srcdir = os.path.abspath(os.path.join(srcdir, 'Modules',
-                                                  '_ctypes', 'libffi_osx'))
-        sources = [os.path.join(ffi_srcdir, p)
-                   for p in ['ffi.c',
-                             'x86/darwin64.S',
-                             'x86/x86-darwin.S',
-                             'x86/x86-ffi_darwin.c',
-                             'x86/x86-ffi64.c',
-                             'powerpc/ppc-darwin.S',
-                             'powerpc/ppc-darwin_closure.S',
-                             'powerpc/ppc-ffi_darwin.c',
-                             'powerpc/ppc64-darwin_closure.S',
-                             ]]
-
-        # Add .S (preprocessed assembly) to C compiler source extensions.
-        self.compiler.src_extensions.append('.S')
-
-        include_dirs = [os.path.join(ffi_srcdir, 'include'),
-                        os.path.join(ffi_srcdir, 'powerpc')]
-        ext.include_dirs.extend(include_dirs)
-        ext.sources.extend(sources)
-        return True
-
-    def configure_ctypes(self, ext):
-        if not self.use_system_libffi:
-            if sys.platform == 'darwin':
-                return self.configure_ctypes_darwin(ext)
-
-            srcdir = sysconfig.get_config_var('srcdir')
-            ffi_builddir = os.path.join(self.build_temp, 'libffi')
-            ffi_srcdir = os.path.abspath(os.path.join(srcdir, 'Modules',
-                                         '_ctypes', 'libffi'))
-            ffi_configfile = os.path.join(ffi_builddir, 'fficonfig.py')
-
-            from distutils.dep_util import newer_group
-
-            config_sources = [os.path.join(ffi_srcdir, fname)
-                              for fname in os.listdir(ffi_srcdir)
-                              if os.path.isfile(os.path.join(ffi_srcdir, fname))]
-            if self.force or newer_group(config_sources,
-                                         ffi_configfile):
-                from distutils.dir_util import mkpath
-                mkpath(ffi_builddir)
-                config_args = []
-
-                # Pass empty CFLAGS because we'll just append the resulting
-                # CFLAGS to Python's; -g or -O2 is to be avoided.
-                cmd = "cd %s && env CFLAGS='' '%s/configure' %s" \
-                      % (ffi_builddir, ffi_srcdir, " ".join(config_args))
-
-                res = os.system(cmd)
-                if res or not os.path.exists(ffi_configfile):
-                    print "Failed to configure _ctypes module"
-                    return False
-
-            fficonfig = {}
-            with open(ffi_configfile) as f:
-                exec f in fficonfig
-
-            # Add .S (preprocessed assembly) to C compiler source extensions.
-            self.compiler.src_extensions.append('.S')
-
-            include_dirs = [os.path.join(ffi_builddir, 'include'),
-                            ffi_builddir,
-                            os.path.join(ffi_srcdir, 'src')]
-            extra_compile_args = fficonfig['ffi_cflags'].split()
-
-            ext.sources.extend(os.path.join(ffi_srcdir, f) for f in
-                               fficonfig['ffi_sources'])
-            ext.include_dirs.extend(include_dirs)
-            ext.extra_compile_args.extend(extra_compile_args)
-        return True
-
-    def detect_ctypes(self, inc_dirs, lib_dirs):
-        self.use_system_libffi = False
-        include_dirs = []
-        extra_compile_args = []
-        extra_link_args = []
-        sources = ['_ctypes/_ctypes.c',
-                   '_ctypes/callbacks.c',
-                   '_ctypes/callproc.c',
-                   '_ctypes/stgdict.c',
-                   '_ctypes/cfield.c']
-        depends = ['_ctypes/ctypes.h']
-
-        if sys.platform == 'darwin':
-            sources.append('_ctypes/malloc_closure.c')
-            sources.append('_ctypes/darwin/dlfcn_simple.c')
-            extra_compile_args.append('-DMACOSX')
-            include_dirs.append('_ctypes/darwin')
-# XXX Is this still needed?
-##            extra_link_args.extend(['-read_only_relocs', 'warning'])
-
-        elif sys.platform == 'sunos5':
-            # XXX This shouldn't be necessary; it appears that some
-            # of the assembler code is non-PIC (i.e. it has relocations
-            # when it shouldn't). The proper fix would be to rewrite
-            # the assembler code to be PIC.
-            # This only works with GCC; the Sun compiler likely refuses
-            # this option. If you want to compile ctypes with the Sun
-            # compiler, please research a proper solution, instead of
-            # finding some -z option for the Sun compiler.
-            extra_link_args.append('-mimpure-text')
-
-        elif sys.platform.startswith('hp-ux'):
-            extra_link_args.append('-fPIC')
-
-        ext = Extension('_ctypes',
-                        include_dirs=include_dirs,
-                        extra_compile_args=extra_compile_args,
-                        extra_link_args=extra_link_args,
-                        libraries=[],
-                        sources=sources,
-                        depends=depends)
-        ext_test = Extension('_ctypes_test',
-                             sources=['_ctypes/_ctypes_test.c'])
-        self.extensions.extend([ext, ext_test])
-
-        if '--with-system-ffi' not in sysconfig.get_config_var("CONFIG_ARGS"):
-            return
-
-        if sys.platform == 'darwin':
-            # OS X 10.5 comes with libffi.dylib; the include files are
-            # in /usr/include/ffi
-            inc_dirs.append('/usr/include/ffi')
-
-        ffi_inc = [sysconfig.get_config_var("LIBFFI_INCLUDEDIR")]
-        if not ffi_inc or ffi_inc[0] == '':
-            ffi_inc = find_file('ffi.h', [], inc_dirs)
-        if ffi_inc is not None:
-            ffi_h = ffi_inc[0] + '/ffi.h'
-            fp = open(ffi_h)
-            while 1:
-                line = fp.readline()
-                if not line:
-                    ffi_inc = None
-                    break
-                if line.startswith('#define LIBFFI_H'):
-                    break
-        ffi_lib = None
-        if ffi_inc is not None:
-            for lib_name in ('ffi_convenience', 'ffi_pic', 'ffi'):
-                if (self.compiler.find_library_file(lib_dirs, lib_name)):
-                    ffi_lib = lib_name
-                    break
-
-        if ffi_inc and ffi_lib:
-            ext.include_dirs.extend(ffi_inc)
-            ext.libraries.append(ffi_lib)
-            self.use_system_libffi = True
-
-
-class PyBuildInstall(install):
-    # Suppress the warning about installation into the lib_dynload
-    # directory, which is not in sys.path when running Python during
-    # installation:
-    def initialize_options (self):
-        install.initialize_options(self)
-        self.warn_dir=0
-
-class PyBuildInstallLib(install_lib):
-    # Do exactly what install_lib does but make sure correct access modes get
-    # set on installed directories and files. All installed files will get
-    # mode 644 unless they are a shared library in which case they will get
-    # mode 755. All installed directories will get mode 755.
-
-    so_ext = sysconfig.get_config_var("SO")
-
-    def install(self):
-        outfiles = install_lib.install(self)
-        self.set_file_modes(outfiles, 0644, 0755)
-        self.set_dir_modes(self.install_dir, 0755)
-        return outfiles
-
-    def set_file_modes(self, files, defaultMode, sharedLibMode):
-        if not self.is_chmod_supported(): return
-        if not files: return
-
-        for filename in files:
-            if os.path.islink(filename): continue
-            mode = defaultMode
-            if filename.endswith(self.so_ext): mode = sharedLibMode
-            log.info("changing mode of %s to %o", filename, mode)
-            if not self.dry_run: os.chmod(filename, mode)
-
-    def set_dir_modes(self, dirname, mode):
-        if not self.is_chmod_supported(): return
-        os.path.walk(dirname, self.set_dir_modes_visitor, mode)
-
-    def set_dir_modes_visitor(self, mode, dirname, names):
-        if os.path.islink(dirname): return
-        log.info("changing mode of %s to %o", dirname, mode)
-        if not self.dry_run: os.chmod(dirname, mode)
-
-    def is_chmod_supported(self):
-        return hasattr(os, 'chmod')
-
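-# Editor's sketch (hypothetical filenames, not part of the original file):
-# the mode policy implemented by PyBuildInstallLib reduces to "shared
-# libraries are executable, everything else is not":
-#
-#   so_ext = '.so'
-#   mode = 0755 if filename.endswith(so_ext) else 0644
-#
-# e.g. 'lib-dynload/_socket.so' is installed 0755 while 'os.py' gets 0644.
-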
-SUMMARY = """
-Python is an interpreted, interactive, object-oriented programming
-language. It is often compared to Tcl, Perl, Scheme or Java.
-
-Python combines remarkable power with very clear syntax. It has
-modules, classes, exceptions, very high level dynamic data types, and
-dynamic typing. There are interfaces to many system calls and
-libraries, as well as to various windowing systems (X11, Motif, Tk,
-Mac, MFC). New built-in modules are easily written in C or C++. Python
-is also usable as an extension language for applications that need a
-programmable interface.
-
-The Python implementation is portable: it runs on many brands of UNIX,
-on Windows, DOS, OS/2, Mac, Amiga... If your favorite system isn't
-listed here, it may still be supported, if there's a C compiler for
-it. Ask around on comp.lang.python -- or just try compiling Python
-yourself.
-"""
-
-CLASSIFIERS = """
-Development Status :: 6 - Mature
-License :: OSI Approved :: Python Software Foundation License
-Natural Language :: English
-Programming Language :: C
-Programming Language :: Python
-Topic :: Software Development
-"""
-
-def main():
-    # turn off warnings when deprecated modules are imported
-    import warnings
-    warnings.filterwarnings("ignore",category=DeprecationWarning)
-    setup(# PyPI Metadata (PEP 301)
-          name = "Python",
-          version = sys.version.split()[0],
-          url = "http://www.python.org/%s" % sys.version[:3],
-          maintainer = "Guido van Rossum and the Python community",
-          maintainer_email = "python-dev@python.org",
-          description = "A high-level object-oriented programming language",
-          long_description = SUMMARY.strip(),
-          license = "PSF license",
-          classifiers = filter(None, CLASSIFIERS.split("\n")),
-          platforms = ["Many"],
-
-          # Build info
-          cmdclass = {'build_ext':PyBuildExt, 'install':PyBuildInstall,
-                      'install_lib':PyBuildInstallLib},
-          # The struct module is defined here, because build_ext won't be
-          # called unless there's at least one extension module defined.
-          ext_modules=[Extension('_struct', ['_struct.c'])],
-
-          # Scripts to install
-          scripts = ['Tools/scripts/pydoc', 'Tools/scripts/idle',
-                     'Tools/scripts/2to3',
-                     'Lib/smtpd.py']
-        )
-
-# --install-platlib
-if __name__ == '__main__':
-    main()
diff --git a/pysrc/src/setup-2.7.4.py b/pysrc/src/setup-2.7.4.py
deleted file mode 100644
index ea8a5f51e963128d8e0487d14c54c5ed36d4c54c..0000000000000000000000000000000000000000
--- a/pysrc/src/setup-2.7.4.py
+++ /dev/null
@@ -1,2186 +0,0 @@
-# Autodetecting setup.py script for building the Python extensions
-#
-
-__version__ = "$Revision$"
-
-import sys, os, imp, re, optparse
-from glob import glob
-from platform import machine as platform_machine
-import sysconfig
-
-from distutils import log
-from distutils import text_file
-from distutils.errors import *
-from distutils.core import Extension, setup
-from distutils.command.build_ext import build_ext
-from distutils.command.install import install
-from distutils.command.install_lib import install_lib
-from distutils.spawn import find_executable
-
-cross_compiling = "_PYTHON_HOST_PLATFORM" in os.environ
-
-def get_platform():
-    # cross build
-    if "_PYTHON_HOST_PLATFORM" in os.environ:
-        return os.environ["_PYTHON_HOST_PLATFORM"]
-    # Get value of sys.platform
-    if sys.platform.startswith('osf1'):
-        return 'osf1'
-    return sys.platform
-host_platform = get_platform()
-
-# Were we compiled --with-pydebug or with #define Py_DEBUG?
-COMPILED_WITH_PYDEBUG = ('--with-pydebug' in sysconfig.get_config_var("CONFIG_ARGS"))
-
-# This global variable is used to hold the list of modules to be disabled.
-disabled_module_list = []
-
-def add_dir_to_list(dirlist, dir):
-    """Add the directory 'dir' to the list 'dirlist' (at the front) if
-    1) 'dir' is not already in 'dirlist'
-    2) 'dir' actually exists, and is a directory."""
-    if dir is not None and os.path.isdir(dir) and dir not in dirlist:
-        dirlist.insert(0, dir)
-
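-# Editor's note -- a hedged usage sketch for add_dir_to_list(); it prepends,
-# so directories added later take precedence over earlier defaults:
-#
-#   dirs = ['/usr/lib']
-#   add_dir_to_list(dirs, '/usr/local/lib')  # -> ['/usr/local/lib', '/usr/lib']
-#   add_dir_to_list(dirs, '/usr/lib')        # no-op: already present
-#   add_dir_to_list(dirs, '/no/such/dir')    # no-op: not an existing directory
-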
-def macosx_sdk_root():
-    """
-    Return the directory of the current OSX SDK,
-    or '/' if no SDK was specified.
-    """
-    cflags = sysconfig.get_config_var('CFLAGS')
-    m = re.search(r'-isysroot\s+(\S+)', cflags)
-    if m is None:
-        sysroot = '/'
-    else:
-        sysroot = m.group(1)
-    return sysroot
-
-def is_macosx_sdk_path(path):
-    """
-    Returns True if 'path' can be located in an OSX SDK
-    """
-    return ( (path.startswith('/usr/') and not path.startswith('/usr/local'))
-                or path.startswith('/System/')
-                or path.startswith('/Library/') )
-
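-# Editor's note on how the two helpers above combine (paths illustrative):
-# with CFLAGS containing '-isysroot /Developer/SDKs/MacOSX10.6.sdk',
-# macosx_sdk_root() returns that SDK directory, and any standard path for
-# which is_macosx_sdk_path() is true gets re-rooted before probing, e.g.
-#
-#   os.path.join(sysroot, '/usr/include'[1:], 'tcl.h')
-#   -> /Developer/SDKs/MacOSX10.6.sdk/usr/include/tcl.h
-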
-def find_file(filename, std_dirs, paths):
-    """Searches for the directory where a given file is located,
-    and returns a possibly-empty list of additional directories, or None
-    if the file couldn't be found at all.
-
-    'filename' is the name of a file, such as readline.h or libcrypto.a.
-    'std_dirs' is the list of standard system directories; if the
-        file is found in one of them, no additional directives are needed.
-    'paths' is a list of additional locations to check; if the file is
-        found in one of them, the resulting list will contain the directory.
-    """
-    if host_platform == 'darwin':
-        # Honor the MacOSX SDK setting when one was specified.
-        # An SDK is a directory with the same structure as a real
-        # system, but with only header files and libraries.
-        sysroot = macosx_sdk_root()
-
-    # Check the standard locations
-    for dir in std_dirs:
-        f = os.path.join(dir, filename)
-
-        if host_platform == 'darwin' and is_macosx_sdk_path(dir):
-            f = os.path.join(sysroot, dir[1:], filename)
-
-        if os.path.exists(f): return []
-
-    # Check the additional directories
-    for dir in paths:
-        f = os.path.join(dir, filename)
-
-        if host_platform == 'darwin' and is_macosx_sdk_path(dir):
-            f = os.path.join(sysroot, dir[1:], filename)
-
-        if os.path.exists(f):
-            return [dir]
-
-    # Not found anywhere
-    return None
-
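-# Editor's sketch (hypothetical paths) of find_file()'s return convention:
-#
-#   find_file('ffi.h', ['/usr/include'], ['/opt/ffi/include'])
-#     -> []                    header found on a standard path, no -I needed
-#     -> ['/opt/ffi/include']  found only in the extra location, add it to -I
-#     -> None                  header not found anywhere
-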
-def find_library_file(compiler, libname, std_dirs, paths):
-    result = compiler.find_library_file(std_dirs + paths, libname)
-    if result is None:
-        return None
-
-    if host_platform == 'darwin':
-        sysroot = macosx_sdk_root()
-
-    # Check whether the found file is in one of the standard directories
-    dirname = os.path.dirname(result)
-    for p in std_dirs:
-        # Ensure path doesn't end with path separator
-        p = p.rstrip(os.sep)
-
-        if host_platform == 'darwin' and is_macosx_sdk_path(p):
-            if os.path.join(sysroot, p[1:]) == dirname:
-                return [ ]
-
-        if p == dirname:
-            return [ ]
-
-    # Otherwise, it must have been in one of the additional directories,
-    # so we have to figure out which one.
-    for p in paths:
-        # Ensure path doesn't end with path separator
-        p = p.rstrip(os.sep)
-
-        if host_platform == 'darwin' and is_macosx_sdk_path(p):
-            if os.path.join(sysroot, p[1:]) == dirname:
-                return [ p ]
-
-        if p == dirname:
-            return [p]
-    else:
-        assert False, "Internal error: Path not found in std_dirs or paths"
-
-def module_enabled(extlist, modname):
-    """Returns whether the module 'modname' is present in the list
-    of extensions 'extlist'."""
-    extlist = [ext for ext in extlist if ext.name == modname]
-    return len(extlist)
-
-def find_module_file(module, dirlist):
-    """Find a module in a set of possible folders. If it is not found
-    return the unadorned filename"""
-    list = find_file(module, [], dirlist)
-    if not list:
-        return module
-    if len(list) > 1:
-        log.info("WARNING: multiple copies of %s found"%module)
-    return os.path.join(list[0], module)
-
-class PyBuildExt(build_ext):
-
-    def __init__(self, dist):
-        build_ext.__init__(self, dist)
-        self.failed = []
-
-    def build_extensions(self):
-
-        # Detect which modules should be compiled
-        missing = self.detect_modules()
-
-        # Remove modules that are present on the disabled list
-        extensions = [ext for ext in self.extensions
-                      if ext.name not in disabled_module_list]
-        # move ctypes to the end, it depends on other modules
-        ext_map = dict((ext.name, i) for i, ext in enumerate(extensions))
-        if "_ctypes" in ext_map:
-            ctypes = extensions.pop(ext_map["_ctypes"])
-            extensions.append(ctypes)
-        self.extensions = extensions
-
-        # Fix up the autodetected modules, prefixing all the source files
-        # with Modules/ and adding Python's include directory to the path.
-        (srcdir,) = sysconfig.get_config_vars('srcdir')
-        if not srcdir:
-            # Maybe running on Windows but not using CYGWIN?
-            raise ValueError("No source directory; cannot proceed.")
-        srcdir = os.path.abspath(srcdir)
-        moddirlist = [os.path.join(srcdir, 'Modules')]
-
-        # Platform-dependent module source and include directories
-        incdirlist = []
-
-        if host_platform == 'darwin' and ("--disable-toolbox-glue" not in
-            sysconfig.get_config_var("CONFIG_ARGS")):
-            # Mac OS X also includes some mac-specific modules
-            macmoddir = os.path.join(srcdir, 'Mac/Modules')
-            moddirlist.append(macmoddir)
-            incdirlist.append(os.path.join(srcdir, 'Mac/Include'))
-
-        # Fix up the paths for scripts, too
-        self.distribution.scripts = [os.path.join(srcdir, filename)
-                                     for filename in self.distribution.scripts]
-
-        # Python header files
-        headers = [sysconfig.get_config_h_filename()]
-        headers += glob(os.path.join(sysconfig.get_path('include'), "*.h"))
-        for ext in self.extensions[:]:
-            ext.sources = [ find_module_file(filename, moddirlist)
-                            for filename in ext.sources ]
-            if ext.depends is not None:
-                ext.depends = [find_module_file(filename, moddirlist)
-                               for filename in ext.depends]
-            else:
-                ext.depends = []
-            # re-compile extensions if a header file has been changed
-            ext.depends.extend(headers)
-
-            # platform specific include directories
-            ext.include_dirs.extend(incdirlist)
-
-            # If a module has already been built statically,
-            # don't build it here
-            if ext.name in sys.builtin_module_names:
-                self.extensions.remove(ext)
-
-        # Parse Modules/Setup and Modules/Setup.local to figure out which
-        # modules are turned on in the file.
-        remove_modules = []
-        for filename in ('Modules/Setup', 'Modules/Setup.local'):
-            input = text_file.TextFile(filename, join_lines=1)
-            while 1:
-                line = input.readline()
-                if not line: break
-                line = line.split()
-                remove_modules.append(line[0])
-            input.close()
-
-        for ext in self.extensions[:]:
-            if ext.name in remove_modules:
-                self.extensions.remove(ext)
-
-        # When you run "make CC=altcc" or something similar, you really want
-        # those environment variables passed into the setup.py phase.  Here's
-        # a small set of useful ones.
-        compiler = os.environ.get('CC')
-        args = {}
-        # unfortunately, distutils doesn't let us provide separate C and C++
-        # compilers
-        if compiler is not None:
-            (ccshared,cflags) = sysconfig.get_config_vars('CCSHARED','CFLAGS')
-            args['compiler_so'] = compiler + ' ' + ccshared + ' ' + cflags
-        self.compiler.set_executables(**args)
-
-        build_ext.build_extensions(self)
-
-        longest = max([len(e.name) for e in self.extensions])
-        if self.failed:
-            longest = max(longest, max([len(name) for name in self.failed]))
-
-        def print_three_column(lst):
-            lst.sort(key=str.lower)
-            # guarantee zip() doesn't drop anything
-            while len(lst) % 3:
-                lst.append("")
-            for e, f, g in zip(lst[::3], lst[1::3], lst[2::3]):
-                print "%-*s   %-*s   %-*s" % (longest, e, longest, f,
-                                              longest, g)
-
-        if missing:
-            print
-            print ("Python build finished, but the necessary bits to build "
-                   "these modules were not found:")
-            print_three_column(missing)
-            print ("To find the necessary bits, look in setup.py in"
-                   " detect_modules() for the module's name.")
-            print
-
-        if self.failed:
-            failed = self.failed[:]
-            print
-            print "Failed to build these modules:"
-            print_three_column(failed)
-            print
-
-    def build_extension(self, ext):
-
-        if ext.name == '_ctypes':
-            if not self.configure_ctypes(ext):
-                return
-
-        try:
-            build_ext.build_extension(self, ext)
-        except (CCompilerError, DistutilsError), why:
-            self.announce('WARNING: building of extension "%s" failed: %s' %
-                          (ext.name, sys.exc_info()[1]))
-            self.failed.append(ext.name)
-            return
-        # Workaround for Mac OS X: The Carbon-based modules cannot be
-        # reliably imported into a command-line Python
-        if 'Carbon' in ext.extra_link_args:
-            self.announce(
-                'WARNING: skipping import check for Carbon-based "%s"' %
-                ext.name)
-            return
-
-        if host_platform == 'darwin' and (
-                sys.maxint > 2**32 and '-arch' in ext.extra_link_args):
-            # Don't bother doing an import check when an extension was
-            # built with an explicit '-arch' flag on OSX. That's currently
-            # only used to build 32-bit only extensions in a 4-way
-            # universal build and loading 32-bit code into a 64-bit
-            # process will fail.
-            self.announce(
-                'WARNING: skipping import check for "%s"' %
-                ext.name)
-            return
-
-        # Workaround for Cygwin: Cygwin currently has fork issues when many
-        # modules have been imported
-        if host_platform == 'cygwin':
-            self.announce('WARNING: skipping import check for Cygwin-based "%s"'
-                % ext.name)
-            return
-        ext_filename = os.path.join(
-            self.build_lib,
-            self.get_ext_filename(self.get_ext_fullname(ext.name)))
-
-        # Don't try to load extensions for cross builds
-        if cross_compiling:
-            return
-
-        try:
-            imp.load_dynamic(ext.name, ext_filename)
-        except ImportError, why:
-            self.failed.append(ext.name)
-            self.announce('*** WARNING: renaming "%s" since importing it'
-                          ' failed: %s' % (ext.name, why), level=3)
-            assert not self.inplace
-            basename, tail = os.path.splitext(ext_filename)
-            newname = basename + "_failed" + tail
-            if os.path.exists(newname):
-                os.remove(newname)
-            os.rename(ext_filename, newname)
-
-            # XXX -- This relies on a Vile HACK in
-            # distutils.command.build_ext.build_extension().  The
-            # _built_objects attribute is stored there strictly for
-            # use here.
-            # If there is a failure, _built_objects may not be there,
-            # so catch the AttributeError and move on.
-            try:
-                for filename in self._built_objects:
-                    os.remove(filename)
-            except AttributeError:
-                self.announce('unable to remove files (ignored)')
-        except:
-            exc_type, why, tb = sys.exc_info()
-            self.announce('*** WARNING: importing extension "%s" '
-                          'failed with %s: %s' % (ext.name, exc_type, why),
-                          level=3)
-            self.failed.append(ext.name)
-
-    def add_multiarch_paths(self):
-        # Debian/Ubuntu multiarch support.
-        # https://wiki.ubuntu.com/MultiarchSpec
-        cc = sysconfig.get_config_var('CC')
-        tmpfile = os.path.join(self.build_temp, 'multiarch')
-        if not os.path.exists(self.build_temp):
-            os.makedirs(self.build_temp)
-        ret = os.system(
-            '%s -print-multiarch > %s 2> /dev/null' % (cc, tmpfile))
-        multiarch_path_component = ''
-        try:
-            if ret >> 8 == 0:
-                with open(tmpfile) as fp:
-                    multiarch_path_component = fp.readline().strip()
-        finally:
-            os.unlink(tmpfile)
-
-        if multiarch_path_component != '':
-            add_dir_to_list(self.compiler.library_dirs,
-                            '/usr/lib/' + multiarch_path_component)
-            add_dir_to_list(self.compiler.include_dirs,
-                            '/usr/include/' + multiarch_path_component)
-            return
-
-        if not find_executable('dpkg-architecture'):
-            return
-        opt = ''
-        if cross_compiling:
-            opt = '-t' + sysconfig.get_config_var('HOST_GNU_TYPE')
-        tmpfile = os.path.join(self.build_temp, 'multiarch')
-        if not os.path.exists(self.build_temp):
-            os.makedirs(self.build_temp)
-        ret = os.system(
-            'dpkg-architecture %s -qDEB_HOST_MULTIARCH > %s 2> /dev/null' %
-            (opt, tmpfile))
-        try:
-            if ret >> 8 == 0:
-                with open(tmpfile) as fp:
-                    multiarch_path_component = fp.readline().strip()
-                add_dir_to_list(self.compiler.library_dirs,
-                                '/usr/lib/' + multiarch_path_component)
-                add_dir_to_list(self.compiler.include_dirs,
-                                '/usr/include/' + multiarch_path_component)
-        finally:
-            os.unlink(tmpfile)
-
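-    # Editor's note: on a Debian-flavoured x86-64 box both probes above
-    # typically report the multiarch triplet (value illustrative):
-    #
-    #   $ gcc -print-multiarch
-    #   x86_64-linux-gnu
-    #   $ dpkg-architecture -qDEB_HOST_MULTIARCH
-    #   x86_64-linux-gnu
-    #
-    # which adds /usr/lib/x86_64-linux-gnu and /usr/include/x86_64-linux-gnu.
-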
-    def add_gcc_paths(self):
-        gcc = sysconfig.get_config_var('CC')
-        tmpfile = os.path.join(self.build_temp, 'gccpaths')
-        if not os.path.exists(self.build_temp):
-            os.makedirs(self.build_temp)
-        ret = os.system('%s -E -v - </dev/null 2>%s 1>/dev/null' % (gcc, tmpfile))
-        is_gcc = False
-        in_incdirs = False
-        inc_dirs = []
-        lib_dirs = []
-        try:
-            if ret >> 8 == 0:
-                with open(tmpfile) as fp:
-                    for line in fp.readlines():
-                        if line.startswith("gcc version"):
-                            is_gcc = True
-                        elif line.startswith("#include <...>"):
-                            in_incdirs = True
-                        elif line.startswith("End of search list"):
-                            in_incdirs = False
-                        elif is_gcc and line.startswith("LIBRARY_PATH"):
-                            for d in line.strip().split("=")[1].split(":"):
-                                d = os.path.normpath(d)
-                                if '/gcc/' not in d:
-                                    add_dir_to_list(self.compiler.library_dirs,
-                                                    d)
-                        elif is_gcc and in_incdirs and '/gcc/' not in line:
-                            add_dir_to_list(self.compiler.include_dirs,
-                                            line.strip())
-        finally:
-            os.unlink(tmpfile)
-
-    def detect_modules(self):
-        # Ensure that /usr/local is always used
-        add_dir_to_list(self.compiler.library_dirs, '/usr/local/lib')
-        add_dir_to_list(self.compiler.include_dirs, '/usr/local/include')
-        self.add_multiarch_paths()
-
-        # Add paths specified in the environment variables LDFLAGS and
-        # CPPFLAGS for header and library files.
-        # We must get the values from the Makefile and not the environment
-        # directly since an inconsistently reproducible issue comes up where
-        # the environment variable is not set even though the values were passed
-        # into configure and stored in the Makefile (issue found on OS X 10.3).
-        for env_var, arg_name, dir_list in (
-                ('LDFLAGS', '-R', self.compiler.runtime_library_dirs),
-                ('LDFLAGS', '-L', self.compiler.library_dirs),
-                ('CPPFLAGS', '-I', self.compiler.include_dirs)):
-            env_val = sysconfig.get_config_var(env_var)
-            if env_val:
-                # To prevent optparse from raising an exception about any
-                # options in env_val that it doesn't know about, we strip out
-                # all double dashes and any dashes followed by a character
-                # that is not for the option we are dealing with.
-                #
-                # Please note that order of the regex is important!  We must
-                # strip out double-dashes first so that we don't end up
-                # substituting "--Long" with "-Long", leaving "ong" to be
-                # used for a library directory.
-                env_val = re.sub(r'(^|\s+)-(-|(?!%s))' % arg_name[1],
-                                 ' ', env_val)
-                parser = optparse.OptionParser()
-                # Make sure that allowing args interspersed with options is
-                # allowed
-                parser.allow_interspersed_args = True
-                parser.error = lambda msg: None
-                parser.add_option(arg_name, dest="dirs", action="append")
-                options = parser.parse_args(env_val.split())[0]
-                if options.dirs:
-                    for directory in reversed(options.dirs):
-                        add_dir_to_list(dir_list, directory)
-
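-        # Editor's sketch of the loop above on a hypothetical value
-        # LDFLAGS='-L/opt/foo/lib --param x': the regex first neutralizes
-        # unknown options, rewriting the string to '-L/opt/foo/lib param x';
-        # the throwaway parser then collects options.dirs == ['/opt/foo/lib'],
-        # which add_dir_to_list() prepends to self.compiler.library_dirs.
-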
-        if os.path.normpath(sys.prefix) != '/usr' \
-                and not sysconfig.get_config_var('PYTHONFRAMEWORK'):
-            # OSX note: Don't add LIBDIR and INCLUDEDIR to building a framework
-            # (PYTHONFRAMEWORK is set) to avoid linking problems when
-            # building a framework with different architectures than
-            # the one that is currently installed (issue #7473)
-            add_dir_to_list(self.compiler.library_dirs,
-                            sysconfig.get_config_var("LIBDIR"))
-            add_dir_to_list(self.compiler.include_dirs,
-                            sysconfig.get_config_var("INCLUDEDIR"))
-
-        try:
-            have_unicode = unicode
-        except NameError:
-            have_unicode = 0
-
-        # lib_dirs and inc_dirs are used to search for files;
-        # if a file is found in one of those directories, it can
-        # be assumed that no additional -I,-L directives are needed.
-        inc_dirs = self.compiler.include_dirs[:]
-        lib_dirs = self.compiler.library_dirs[:]
-        if not cross_compiling:
-            for d in (
-                '/usr/include',
-                ):
-                add_dir_to_list(inc_dirs, d)
-            for d in (
-                '/lib64', '/usr/lib64',
-                '/lib', '/usr/lib',
-                ):
-                add_dir_to_list(lib_dirs, d)
-        exts = []
-        missing = []
-
-        config_h = sysconfig.get_config_h_filename()
-        config_h_vars = sysconfig.parse_config_h(open(config_h))
-
-        srcdir = sysconfig.get_config_var('srcdir')
-
-        # Check for AtheOS which has libraries in non-standard locations
-        if host_platform == 'atheos':
-            lib_dirs += ['/system/libs', '/atheos/autolnk/lib']
-            lib_dirs += os.getenv('LIBRARY_PATH', '').split(os.pathsep)
-            inc_dirs += ['/system/include', '/atheos/autolnk/include']
-            inc_dirs += os.getenv('C_INCLUDE_PATH', '').split(os.pathsep)
-
-        # OSF/1 and Unixware have some stuff in /usr/ccs/lib (like -ldb)
-        if host_platform in ['osf1', 'unixware7', 'openunix8']:
-            lib_dirs += ['/usr/ccs/lib']
-
-        # HP-UX11iv3 keeps files in lib/hpux folders.
-        if host_platform == 'hp-ux11':
-            lib_dirs += ['/usr/lib/hpux64', '/usr/lib/hpux32']
-
-        if host_platform == 'darwin':
-            # This should work on any unixy platform ;-)
-            # If the user has bothered specifying additional -I and -L flags
-            # in OPT and LDFLAGS we might as well use them here.
-            #   NOTE: using shlex.split would technically be more correct, but
-            # also gives a bootstrap problem. Let's hope nobody uses directories
-            # with whitespace in the name to store libraries.
-            cflags, ldflags = sysconfig.get_config_vars(
-                    'CFLAGS', 'LDFLAGS')
-            for item in cflags.split():
-                if item.startswith('-I'):
-                    inc_dirs.append(item[2:])
-
-            for item in ldflags.split():
-                if item.startswith('-L'):
-                    lib_dirs.append(item[2:])
-
-        # Check for MacOS X, which doesn't need libm.a at all
-        math_libs = ['m']
-        if host_platform in ['darwin', 'beos']:
-            math_libs = []
-
-        # XXX Omitted modules: gl, pure, dl, SGI-specific modules
-
-        #
-        # The following modules are all pretty straightforward, and compile
-        # on pretty much any POSIXish platform.
-        #
-
-        # Some modules that are normally always on:
-        #exts.append( Extension('_weakref', ['_weakref.c']) )
-
-        # array objects
-        exts.append( Extension('array', ['arraymodule.c']) )
-        # complex math library functions
-        exts.append( Extension('cmath', ['cmathmodule.c', '_math.c'],
-                               depends=['_math.h'],
-                               libraries=math_libs) )
-        # math library functions, e.g. sin()
-        exts.append( Extension('math',  ['mathmodule.c', '_math.c'],
-                               depends=['_math.h'],
-                               libraries=math_libs) )
-        # fast string operations implemented in C
-        exts.append( Extension('strop', ['stropmodule.c']) )
-        # time operations and variables
-        exts.append( Extension('time', ['timemodule.c'],
-                               libraries=math_libs) )
-        exts.append( Extension('datetime', ['datetimemodule.c', 'timemodule.c'],
-                               libraries=math_libs) )
-        # fast iterator tools implemented in C
-        exts.append( Extension("itertools", ["itertoolsmodule.c"]) )
-        # code that will be builtins in the future, but conflict with the
-        #  current builtins
-        exts.append( Extension('future_builtins', ['future_builtins.c']) )
-        # random number generator implemented in C
-        exts.append( Extension("_random", ["_randommodule.c"]) )
-        # high-performance collections
-        exts.append( Extension("_collections", ["_collectionsmodule.c"]) )
-        # bisect
-        exts.append( Extension("_bisect", ["_bisectmodule.c"]) )
-        # heapq
-        exts.append( Extension("_heapq", ["_heapqmodule.c"]) )
-        # operator.add() and similar goodies
-        exts.append( Extension('operator', ['operator.c']) )
-        # Python 3.1 _io library
-        exts.append( Extension("_io",
-            ["_io/bufferedio.c", "_io/bytesio.c", "_io/fileio.c",
-             "_io/iobase.c", "_io/_iomodule.c", "_io/stringio.c", "_io/textio.c"],
-             depends=["_io/_iomodule.h"], include_dirs=["Modules/_io"]))
-        # _functools
-        exts.append( Extension("_functools", ["_functoolsmodule.c"]) )
-        # _json speedups
-        exts.append( Extension("_json", ["_json.c"]) )
-        # Python C API test module
-        exts.append( Extension('_testcapi', ['_testcapimodule.c'],
-                               depends=['testcapi_long.h']) )
-        # profilers (_lsprof is for cProfile.py)
-        exts.append( Extension('_hotshot', ['_hotshot.c']) )
-        exts.append( Extension('_lsprof', ['_lsprof.c', 'rotatingtree.c']) )
-        # static Unicode character database
-        if have_unicode:
-            exts.append( Extension('unicodedata', ['unicodedata.c']) )
-        else:
-            missing.append('unicodedata')
-        # access to ISO C locale support
-        data = open('pyconfig.h').read()
-        m = re.search(r"#s*define\s+WITH_LIBINTL\s+1\s*", data)
-        if m is not None:
-            locale_libs = ['intl']
-        else:
-            locale_libs = []
-        if host_platform == 'darwin':
-            locale_extra_link_args = ['-framework', 'CoreFoundation']
-        else:
-            locale_extra_link_args = []
-
-
-        exts.append( Extension('_locale', ['_localemodule.c'],
-                               libraries=locale_libs,
-                               extra_link_args=locale_extra_link_args) )
-
-        # Modules with some UNIX dependencies -- on by default:
-        # (If you have a really backward UNIX, select and socket may not be
-        # supported...)
-
-        # fcntl(2) and ioctl(2)
-        libs = []
-        if (config_h_vars.get('FLOCK_NEEDS_LIBBSD', False)):
-            # May be necessary on AIX for flock function
-            libs = ['bsd']
-        exts.append( Extension('fcntl', ['fcntlmodule.c'], libraries=libs) )
-        # pwd(3)
-        exts.append( Extension('pwd', ['pwdmodule.c']) )
-        # grp(3)
-        exts.append( Extension('grp', ['grpmodule.c']) )
-        # spwd, shadow passwords
-        if (config_h_vars.get('HAVE_GETSPNAM', False) or
-                config_h_vars.get('HAVE_GETSPENT', False)):
-            exts.append( Extension('spwd', ['spwdmodule.c']) )
-        else:
-            missing.append('spwd')
-
-        # select(2); not on ancient System V
-        exts.append( Extension('select', ['selectmodule.c']) )
-
-        # Fred Drake's interface to the Python parser
-        exts.append( Extension('parser', ['parsermodule.c']) )
-
-        # cStringIO and cPickle
-        exts.append( Extension('cStringIO', ['cStringIO.c']) )
-        exts.append( Extension('cPickle', ['cPickle.c']) )
-
-        # Memory-mapped files (also works on Win32).
-        if host_platform not in ['atheos']:
-            exts.append( Extension('mmap', ['mmapmodule.c']) )
-        else:
-            missing.append('mmap')
-
-        # Lance Ellinghaus's syslog module
-        # syslog daemon interface
-        exts.append( Extension('syslog', ['syslogmodule.c']) )
-
-        # George Neville-Neil's timing module:
-        # Deprecated in PEP 4 http://www.python.org/peps/pep-0004.html
-        # http://mail.python.org/pipermail/python-dev/2006-January/060023.html
-        #exts.append( Extension('timing', ['timingmodule.c']) )
-
-        #
-        # Here ends the simple stuff.  From here on, modules need certain
-        # libraries, are platform-specific, or present other surprises.
-        #
-
-        # Multimedia modules
-        # These don't work for 64-bit platforms!!!
-        # These represent audio samples or images as strings:
-
-        # Operations on audio samples
-        # According to #993173, this one should actually work fine on
-        # 64-bit platforms.
-        exts.append( Extension('audioop', ['audioop.c']) )
-
-        # Disabled on 64-bit platforms
-        if sys.maxint != 9223372036854775807L:
-            # Operations on images
-            exts.append( Extension('imageop', ['imageop.c']) )
-        else:
-            missing.extend(['imageop'])
-
-        # readline
-        do_readline = self.compiler.find_library_file(lib_dirs, 'readline')
-        readline_termcap_library = ""
-        curses_library = ""
-        # Determine if readline is already linked against curses or tinfo.
-        if do_readline and find_executable('ldd'):
-            fp = os.popen("ldd %s" % do_readline)
-            ldd_output = fp.readlines()
-            ret = fp.close()
-            if ret is None or ret >> 8 == 0:
-                for ln in ldd_output:
-                    if 'curses' in ln:
-                        readline_termcap_library = re.sub(
-                            r'.*lib(n?cursesw?)\.so.*', r'\1', ln
-                        ).rstrip()
-                        break
-                    if 'tinfo' in ln: # termcap interface split out from ncurses
-                        readline_termcap_library = 'tinfo'
-                        break
-        # Issue 7384: If readline is already linked against curses,
-        # use the same library for the readline and curses modules.
-        if 'curses' in readline_termcap_library:
-            curses_library = readline_termcap_library
-        elif self.compiler.find_library_file(lib_dirs, 'ncursesw'):
-            curses_library = 'ncursesw'
-        elif self.compiler.find_library_file(lib_dirs, 'ncurses'):
-            curses_library = 'ncurses'
-        elif self.compiler.find_library_file(lib_dirs, 'curses'):
-            curses_library = 'curses'
-
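-        # Editor's note, with an illustrative ldd output line:
-        #
-        #   libncursesw.so.5 => /lib/x86_64-linux-gnu/libncursesw.so.5 (...)
-        #
-        # the substitution r'.*lib(n?cursesw?)\.so.*' -> r'\1' collapses the
-        # whole line to 'ncursesw', so readline and the curses module end up
-        # linked against the same curses flavour.
-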
-        if host_platform == 'darwin':
-            os_release = int(os.uname()[2].split('.')[0])
-            dep_target = sysconfig.get_config_var('MACOSX_DEPLOYMENT_TARGET')
-            if dep_target and dep_target.split('.') < ['10', '5']:
-                os_release = 8
-            if os_release < 9:
-                # MacOSX 10.4 has a broken readline. Don't try to build
-                # the readline module unless the user has installed a fixed
-                # readline package
-                if find_file('readline/rlconf.h', inc_dirs, []) is None:
-                    do_readline = False
-        if do_readline:
-            if host_platform == 'darwin' and os_release < 9:
-                # In every directory on the search path search for a dynamic
-                # library and then a static library, instead of first looking
-                # for dynamic libraries on the entire path.
-                # This way a statically linked custom readline gets picked up
-                # before the (possibly broken) dynamic library in /usr/lib.
-                readline_extra_link_args = ('-Wl,-search_paths_first',)
-            else:
-                readline_extra_link_args = ()
-
-            readline_libs = ['readline']
-            if readline_termcap_library:
-                pass # Issue 7384: Already linked against curses or tinfo.
-            elif curses_library:
-                readline_libs.append(curses_library)
-            elif self.compiler.find_library_file(lib_dirs +
-                                                     ['/usr/lib/termcap'],
-                                                     'termcap'):
-                readline_libs.append('termcap')
-            exts.append( Extension('readline', ['readline.c'],
-                                   library_dirs=['/usr/lib/termcap'],
-                                   extra_link_args=readline_extra_link_args,
-                                   libraries=readline_libs) )
-        else:
-            missing.append('readline')
-
-        # crypt module.
-
-        if self.compiler.find_library_file(lib_dirs, 'crypt'):
-            libs = ['crypt']
-        else:
-            libs = []
-        exts.append( Extension('crypt', ['cryptmodule.c'], libraries=libs) )
-
-        # CSV files
-        exts.append( Extension('_csv', ['_csv.c']) )
-
-        # socket(2)
-        exts.append( Extension('_socket', ['socketmodule.c', 'timemodule.c'],
-                               depends=['socketmodule.h'],
-                               libraries=math_libs) )
-        # Detect SSL support for the socket module (via _ssl)
-        search_for_ssl_incs_in = [
-                              '/usr/local/ssl/include',
-                              '/usr/contrib/ssl/include/'
-                             ]
-        ssl_incs = find_file('openssl/ssl.h', inc_dirs,
-                             search_for_ssl_incs_in
-                             )
-        if ssl_incs is not None:
-            krb5_h = find_file('krb5.h', inc_dirs,
-                               ['/usr/kerberos/include'])
-            if krb5_h:
-                ssl_incs += krb5_h
-        ssl_libs = find_library_file(self.compiler, 'ssl',lib_dirs,
-                                     ['/usr/local/ssl/lib',
-                                      '/usr/contrib/ssl/lib/'
-                                     ] )
-
-        if (ssl_incs is not None and
-            ssl_libs is not None):
-            exts.append( Extension('_ssl', ['_ssl.c'],
-                                   include_dirs = ssl_incs,
-                                   library_dirs = ssl_libs,
-                                   libraries = ['ssl', 'crypto'],
-                                   depends = ['socketmodule.h']) )
-        else:
-            missing.append('_ssl')
-
-        # find out which version of OpenSSL we have
-        openssl_ver = 0
-        openssl_ver_re = re.compile(
-            '^\s*#\s*define\s+OPENSSL_VERSION_NUMBER\s+(0x[0-9a-fA-F]+)' )
-
-        # look for the openssl version header on the compiler search path.
-        opensslv_h = find_file('openssl/opensslv.h', [],
-                inc_dirs + search_for_ssl_incs_in)
-        if opensslv_h:
-            name = os.path.join(opensslv_h[0], 'openssl/opensslv.h')
-            if host_platform == 'darwin' and is_macosx_sdk_path(name):
-                name = os.path.join(macosx_sdk_root(), name[1:])
-            try:
-                incfile = open(name, 'r')
-                for line in incfile:
-                    m = openssl_ver_re.match(line)
-                    if m:
-                        openssl_ver = eval(m.group(1))
-            except IOError, msg:
-                print "IOError while reading opensshv.h:", msg
-                pass
-
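-        # Editor's sketch: OPENSSL_VERSION_NUMBER packs the version as
-        # 0xMNNFFPPS, so a hypothetical header line
-        #
-        #   #define OPENSSL_VERSION_NUMBER 0x0090802fL
-        #
-        # is captured as '0x0090802f' and eval()ed to an int for OpenSSL
-        # 0.9.8b, comfortably above the 0x00907000 (0.9.7) floor used below.
-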
-        min_openssl_ver = 0x00907000
-        have_any_openssl = ssl_incs is not None and ssl_libs is not None
-        have_usable_openssl = (have_any_openssl and
-                               openssl_ver >= min_openssl_ver)
-
-        if have_any_openssl:
-            if have_usable_openssl:
-                # The _hashlib module wraps optimized implementations
-                # of hash functions from the OpenSSL library.
-                exts.append( Extension('_hashlib', ['_hashopenssl.c'],
-                                       include_dirs = ssl_incs,
-                                       library_dirs = ssl_libs,
-                                       libraries = ['ssl', 'crypto']) )
-            else:
-                print ("warning: openssl 0x%08x is too old for _hashlib" %
-                       openssl_ver)
-                missing.append('_hashlib')
-        if COMPILED_WITH_PYDEBUG or not have_usable_openssl:
-            # The _sha module implements the SHA1 hash algorithm.
-            exts.append( Extension('_sha', ['shamodule.c']) )
-            # The _md5 module implements the RSA Data Security, Inc. MD5
-            # Message-Digest Algorithm, described in RFC 1321.  The
-            # necessary files md5.c and md5.h are included here.
-            exts.append( Extension('_md5',
-                            sources = ['md5module.c', 'md5.c'],
-                            depends = ['md5.h']) )
-
-        min_sha2_openssl_ver = 0x00908000
-        if COMPILED_WITH_PYDEBUG or openssl_ver < min_sha2_openssl_ver:
-            # OpenSSL doesn't do these until 0.9.8 so we'll bring our own hash
-            exts.append( Extension('_sha256', ['sha256module.c']) )
-            exts.append( Extension('_sha512', ['sha512module.c']) )
-
-        # Modules that provide persistent dictionary-like semantics.  You will
-        # probably want to arrange for at least one of them to be available on
-        # your machine, though none are defined by default because of library
-        # dependencies.  The Python module anydbm.py provides an
-        # implementation independent wrapper for these; dumbdbm.py provides
-        # similar functionality (but slower of course) implemented in Python.
-
-        # Sleepycat^WOracle Berkeley DB interface.
-        #  http://www.oracle.com/database/berkeley-db/db/index.html
-        #
-        # This requires the Sleepycat^WOracle DB code. The supported versions
-        # are set below.  Visit the URL above to download
-        # a release.  Most open source OSes come with one or more
-        # versions of BerkeleyDB already installed.
-
-        max_db_ver = (5, 3)
-        min_db_ver = (4, 3)
-        db_setup_debug = False   # verbose debug prints from this script?
-
-        def allow_db_ver(db_ver):
-            """Returns a boolean if the given BerkeleyDB version is acceptable.
-
-            Args:
-              db_ver: A tuple of the version to verify.
-            """
-            if not (min_db_ver <= db_ver <= max_db_ver):
-                return False
-            # Use this function to filter out known bad configurations.
-            if (4, 6) == db_ver[:2]:
-                # BerkeleyDB 4.6.x is not stable on many architectures.
-                arch = platform_machine()
-                if arch not in ('i386', 'i486', 'i586', 'i686',
-                                'x86_64', 'ia64'):
-                    return False
-            return True
-
-        def gen_db_minor_ver_nums(major):
-            if major == 5:
-                for x in range(max_db_ver[1]+1):
-                    if allow_db_ver((5, x)):
-                        yield x
-            elif major == 4:
-                for x in range(max_db_ver[1]+1):
-                    if allow_db_ver((4, x)):
-                        yield x
-            elif major == 3:
-                for x in (3,):
-                    if allow_db_ver((3, x)):
-                        yield x
-            else:
-                raise ValueError("unknown major BerkeleyDB version", major)
-
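-        # Editor's note: with max_db_ver == (5, 3) and min_db_ver == (4, 3),
-        # gen_db_minor_ver_nums(5) yields 0..3 while gen_db_minor_ver_nums(4)
-        # yields only 3 (smaller minors fall below min_db_ver, and 4.6 is
-        # additionally rejected on non-x86 machines by allow_db_ver).
-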
-        # construct a list of paths to look for the header file in on
-        # top of the normal inc_dirs.
-        db_inc_paths = [
-            '/usr/include/db4',
-            '/usr/local/include/db4',
-            '/opt/sfw/include/db4',
-            '/usr/include/db3',
-            '/usr/local/include/db3',
-            '/opt/sfw/include/db3',
-            # Fink defaults (http://fink.sourceforge.net/)
-            '/sw/include/db4',
-            '/sw/include/db3',
-        ]
-        # 4.x minor number specific paths
-        for x in gen_db_minor_ver_nums(4):
-            db_inc_paths.append('/usr/include/db4%d' % x)
-            db_inc_paths.append('/usr/include/db4.%d' % x)
-            db_inc_paths.append('/usr/local/BerkeleyDB.4.%d/include' % x)
-            db_inc_paths.append('/usr/local/include/db4%d' % x)
-            db_inc_paths.append('/pkg/db-4.%d/include' % x)
-            db_inc_paths.append('/opt/db-4.%d/include' % x)
-            # MacPorts default (http://www.macports.org/)
-            db_inc_paths.append('/opt/local/include/db4%d' % x)
-        # 3.x minor number specific paths
-        for x in gen_db_minor_ver_nums(3):
-            db_inc_paths.append('/usr/include/db3%d' % x)
-            db_inc_paths.append('/usr/local/BerkeleyDB.3.%d/include' % x)
-            db_inc_paths.append('/usr/local/include/db3%d' % x)
-            db_inc_paths.append('/pkg/db-3.%d/include' % x)
-            db_inc_paths.append('/opt/db-3.%d/include' % x)
-
-        if cross_compiling:
-            db_inc_paths = []
-
-        # Add some common subdirectories for Sleepycat DB to the list,
-        # based on the standard include directories. This way DB3/4 gets
-        # picked up when it is installed in a non-standard prefix and
-        # the user has added that prefix into inc_dirs.
-        std_variants = []
-        for dn in inc_dirs:
-            std_variants.append(os.path.join(dn, 'db3'))
-            std_variants.append(os.path.join(dn, 'db4'))
-            for x in gen_db_minor_ver_nums(4):
-                std_variants.append(os.path.join(dn, "db4%d"%x))
-                std_variants.append(os.path.join(dn, "db4.%d"%x))
-            for x in gen_db_minor_ver_nums(3):
-                std_variants.append(os.path.join(dn, "db3%d"%x))
-                std_variants.append(os.path.join(dn, "db3.%d"%x))
-
-        db_inc_paths = std_variants + db_inc_paths
-        db_inc_paths = [p for p in db_inc_paths if os.path.exists(p)]
-
-        db_ver_inc_map = {}
-
-        if host_platform == 'darwin':
-            sysroot = macosx_sdk_root()
-
-        class db_found(Exception): pass
-        try:
-            # See whether there is a Sleepycat header in the standard
-            # search path.
-            for d in inc_dirs + db_inc_paths:
-                f = os.path.join(d, "db.h")
-
-                if host_platform == 'darwin' and is_macosx_sdk_path(d):
-                    f = os.path.join(sysroot, d[1:], "db.h")
-
-                if db_setup_debug: print "db: looking for db.h in", f
-                if os.path.exists(f):
-                    f = open(f).read()
-                    m = re.search(r"#define\WDB_VERSION_MAJOR\W(\d+)", f)
-                    if m:
-                        db_major = int(m.group(1))
-                        m = re.search(r"#define\WDB_VERSION_MINOR\W(\d+)", f)
-                        db_minor = int(m.group(1))
-                        db_ver = (db_major, db_minor)
-
-                        # Avoid 4.6 prior to 4.6.21 due to a BerkeleyDB bug
-                        if db_ver == (4, 6):
-                            m = re.search(r"#define\WDB_VERSION_PATCH\W(\d+)", f)
-                            db_patch = int(m.group(1))
-                            if db_patch < 21:
-                                print "db.h:", db_ver, "patch", db_patch,
-                                print "being ignored (4.6.x must be >= 4.6.21)"
-                                continue
-
-                        if ( (db_ver not in db_ver_inc_map) and
-                            allow_db_ver(db_ver) ):
-                            # save the include directory with the db.h version
-                            # (first occurrence only)
-                            db_ver_inc_map[db_ver] = d
-                            if db_setup_debug:
-                                print "db.h: found", db_ver, "in", d
-                        else:
-                            # we already found a header for this library version
-                            if db_setup_debug: print "db.h: ignoring", d
-                    else:
-                        # ignore this header, it didn't contain a version number
-                        if db_setup_debug:
-                            print "db.h: no version number version in", d
-
-            db_found_vers = db_ver_inc_map.keys()
-            db_found_vers.sort()
-
-            while db_found_vers:
-                db_ver = db_found_vers.pop()
-                db_incdir = db_ver_inc_map[db_ver]
-
-                # check lib directories parallel to the location of the header
-                db_dirs_to_check = [
-                    db_incdir.replace("include", 'lib64'),
-                    db_incdir.replace("include", 'lib'),
-                ]
-
-                if host_platform != 'darwin':
-                    db_dirs_to_check = filter(os.path.isdir, db_dirs_to_check)
-
-                else:
-                    # Same as other branch, but takes OSX SDK into account
-                    tmp = []
-                    for dn in db_dirs_to_check:
-                        if is_macosx_sdk_path(dn):
-                            if os.path.isdir(os.path.join(sysroot, dn[1:])):
-                                tmp.append(dn)
-                        else:
-                            if os.path.isdir(dn):
-                                tmp.append(dn)
-                    db_dirs_to_check = tmp
-
-                # Look for a version specific db-X.Y before an ambiguous dbX
-                # XXX should we -ever- look for a dbX name?  Do any
-                # systems really not name their library by version and
-                # symlink to more general names?
-                for dblib in (('db-%d.%d' % db_ver),
-                              ('db%d%d' % db_ver),
-                              ('db%d' % db_ver[0])):
-                    dblib_file = self.compiler.find_library_file(
-                                    db_dirs_to_check + lib_dirs, dblib )
-                    if dblib_file:
-                        dblib_dir = [ os.path.abspath(os.path.dirname(dblib_file)) ]
-                        raise db_found
-                    else:
-                        if db_setup_debug: print "db lib: ", dblib, "not found"
-
-        except db_found:
-            if db_setup_debug:
-                print "bsddb using BerkeleyDB lib:", db_ver, dblib
-                print "bsddb lib dir:", dblib_dir, " inc dir:", db_incdir
-            db_incs = [db_incdir]
-            dblibs = [dblib]
-            # We add the runtime_library_dirs argument because the
-            # BerkeleyDB lib we're linking against often isn't in the
-            # system dynamic library search path.  This is usually
-            # correct and most trouble free, but may cause problems in
-            # some unusual system configurations (e.g. the directory
-            # is on an NFS server that goes away).
-            exts.append(Extension('_bsddb', ['_bsddb.c'],
-                                  depends = ['bsddb.h'],
-                                  library_dirs=dblib_dir,
-                                  runtime_library_dirs=dblib_dir,
-                                  include_dirs=db_incs,
-                                  libraries=dblibs))
-        else:
-            if db_setup_debug: print "db: no appropriate library found"
-            db_incs = None
-            dblibs = []
-            dblib_dir = None
-            missing.append('_bsddb')
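-        # For reference, with a hypothetical db_ver of (4, 8) the name probe
-        # above tries 'db-4.8', then 'db48', then 'db4', so the fully
-        # versioned name wins over the ambiguous dbX form:
-        ##  for dblib in ('db-4.8', 'db48', 'db4'):
-        ##      if self.compiler.find_library_file(db_dirs_to_check + lib_dirs,
-        ##                                         dblib):
-        ##          break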
-
-        # The sqlite interface
-        sqlite_setup_debug = False   # verbose debug prints from this script?
-
-        # We hunt for #define SQLITE_VERSION "n.n.n"
-        # We need to find >= sqlite version 3.0.8
-        sqlite_incdir = sqlite_libdir = None
-        sqlite_inc_paths = [ '/usr/include',
-                             '/usr/include/sqlite',
-                             '/usr/include/sqlite3',
-                             '/usr/local/include',
-                             '/usr/local/include/sqlite',
-                             '/usr/local/include/sqlite3',
-                           ]
-        if cross_compiling:
-            sqlite_inc_paths = []
-        MIN_SQLITE_VERSION_NUMBER = (3, 0, 8)
-        MIN_SQLITE_VERSION = ".".join([str(x)
-                                    for x in MIN_SQLITE_VERSION_NUMBER])
-
-        # Scan the default include directories before the SQLite specific
-        # ones. This allows one to override the copy of sqlite on OSX,
-        # where /usr/include contains an old version of sqlite.
-        if host_platform == 'darwin':
-            sysroot = macosx_sdk_root()
-
-        for d_ in inc_dirs + sqlite_inc_paths:
-            d = d_
-            if host_platform == 'darwin' and is_macosx_sdk_path(d):
-                d = os.path.join(sysroot, d[1:])
-
-            f = os.path.join(d, "sqlite3.h")
-            if os.path.exists(f):
-                if sqlite_setup_debug: print "sqlite: found %s"%f
-                incf = open(f).read()
-                m = re.search(
-                    r'\s*.*#\s*.*define\s.*SQLITE_VERSION\W*"([\d\.]*)"', incf)
-                if m:
-                    sqlite_version = m.group(1)
-                    sqlite_version_tuple = tuple([int(x)
-                                        for x in sqlite_version.split(".")])
-                    if sqlite_version_tuple >= MIN_SQLITE_VERSION_NUMBER:
-                        # we win!
-                        if sqlite_setup_debug:
-                            print "%s/sqlite3.h: version %s"%(d, sqlite_version)
-                        sqlite_incdir = d
-                        break
-                    else:
-                        if sqlite_setup_debug:
-                            print "%s: version %d is too old, need >= %s"%(d,
-                                        sqlite_version, MIN_SQLITE_VERSION)
-                elif sqlite_setup_debug:
-                    print "sqlite: %s had no SQLITE_VERSION"%(f,)
-
-        if sqlite_incdir:
-            sqlite_dirs_to_check = [
-                os.path.join(sqlite_incdir, '..', 'lib64'),
-                os.path.join(sqlite_incdir, '..', 'lib'),
-                os.path.join(sqlite_incdir, '..', '..', 'lib64'),
-                os.path.join(sqlite_incdir, '..', '..', 'lib'),
-            ]
-            sqlite_libfile = self.compiler.find_library_file(
-                                sqlite_dirs_to_check + lib_dirs, 'sqlite3')
-            if sqlite_libfile:
-                sqlite_libdir = [os.path.abspath(os.path.dirname(sqlite_libfile))]
-
-        if sqlite_incdir and sqlite_libdir:
-            sqlite_srcs = ['_sqlite/cache.c',
-                '_sqlite/connection.c',
-                '_sqlite/cursor.c',
-                '_sqlite/microprotocols.c',
-                '_sqlite/module.c',
-                '_sqlite/prepare_protocol.c',
-                '_sqlite/row.c',
-                '_sqlite/statement.c',
-                '_sqlite/util.c', ]
-
-            sqlite_defines = []
-            if host_platform != "win32":
-                sqlite_defines.append(('MODULE_NAME', '"sqlite3"'))
-            else:
-                sqlite_defines.append(('MODULE_NAME', '\\"sqlite3\\"'))
-
-            # Comment this out if you want the sqlite3 module to be able to load extensions.
-            sqlite_defines.append(("SQLITE_OMIT_LOAD_EXTENSION", "1"))
-
-            if host_platform == 'darwin':
-                # In every directory on the search path search for a dynamic
-                # library and then a static library, instead of first looking
-                # for dynamic libraries on the entire path.
-                # This way a statically linked custom sqlite gets picked up
-                # before the dynamic library in /usr/lib.
-                sqlite_extra_link_args = ('-Wl,-search_paths_first',)
-            else:
-                sqlite_extra_link_args = ()
-
-            exts.append(Extension('_sqlite3', sqlite_srcs,
-                                  define_macros=sqlite_defines,
-                                  include_dirs=["Modules/_sqlite",
-                                                sqlite_incdir],
-                                  library_dirs=sqlite_libdir,
-                                  runtime_library_dirs=sqlite_libdir,
-                                  extra_link_args=sqlite_extra_link_args,
-                                  libraries=["sqlite3",]))
-        else:
-            missing.append('_sqlite3')
-
-        # Look for Berkeley db 1.85.   Note that it is built as a different
-        # module name so it can be included even when later versions are
-        # available.  A very restrictive search is performed to avoid
-        # accidentally building this module with a later version of the
-        # underlying db library.  Many BSD-ish Unixes incorporate db 1.85
-        # symbols into libc and place the include file in /usr/include.
-        #
-        # If the better bsddb library can be built (db_incs is defined)
-        # we do not build this one.  Otherwise this build will pick up
-        # the more recent berkeleydb's db.h file first in the include path
-        # when attempting to compile and it will fail.
-        f = "/usr/include/db.h"
-
-        if host_platform == 'darwin':
-            if is_macosx_sdk_path(f):
-                sysroot = macosx_sdk_root()
-                f = os.path.join(sysroot, f[1:])
-
-        if os.path.exists(f) and not db_incs:
-            data = open(f).read()
-            m = re.search(r"#s*define\s+HASHVERSION\s+2\s*", data)
-            if m is not None:
-                # bingo - old version used hash file format version 2
-                ### XXX this should be fixed to not be platform-dependent
-                ### but I don't have direct access to an osf1 platform and
-                ### I seemed to be muffing the search somehow
-                libraries = host_platform == "osf1" and ['db'] or None
-                if libraries is not None:
-                    exts.append(Extension('bsddb185', ['bsddbmodule.c'],
-                                          libraries=libraries))
-                else:
-                    exts.append(Extension('bsddb185', ['bsddbmodule.c']))
-            else:
-                missing.append('bsddb185')
-        else:
-            missing.append('bsddb185')
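-        # The version probe above reduces to this check (header path and
-        # contents hypothetical); HASHVERSION 2 marks the old 1.85 hash
-        # file format:
-        ##  data = open('/usr/include/db.h').read()
-        ##  if re.search(r"#\s*define\s+HASHVERSION\s+2\s*", data):
-        ##      pass  # old db 1.85 hash format => build bsddb185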
-
-        dbm_order = ['gdbm']
-        # The standard Unix dbm module:
-        if host_platform not in ['cygwin']:
-            config_args = [arg.strip("'")
-                           for arg in sysconfig.get_config_var("CONFIG_ARGS").split()]
-            dbm_args = [arg for arg in config_args
-                        if arg.startswith('--with-dbmliborder=')]
-            if dbm_args:
-                dbm_order = [arg.split('=')[-1] for arg in dbm_args][-1].split(":")
-            else:
-                dbm_order = "ndbm:gdbm:bdb".split(":")
-            dbmext = None
-            for cand in dbm_order:
-                if cand == "ndbm":
-                    if find_file("ndbm.h", inc_dirs, []) is not None:
-                        # Some systems have -lndbm, others have -lgdbm_compat,
-                        # others don't have either
-                        if self.compiler.find_library_file(lib_dirs,
-                                                               'ndbm'):
-                            ndbm_libs = ['ndbm']
-                        elif self.compiler.find_library_file(lib_dirs,
-                                                             'gdbm_compat'):
-                            ndbm_libs = ['gdbm_compat']
-                        else:
-                            ndbm_libs = []
-                        print "building dbm using ndbm"
-                        dbmext = Extension('dbm', ['dbmmodule.c'],
-                                           define_macros=[
-                                               ('HAVE_NDBM_H',None),
-                                               ],
-                                           libraries=ndbm_libs)
-                        break
-
-                elif cand == "gdbm":
-                    if self.compiler.find_library_file(lib_dirs, 'gdbm'):
-                        gdbm_libs = ['gdbm']
-                        if self.compiler.find_library_file(lib_dirs,
-                                                               'gdbm_compat'):
-                            gdbm_libs.append('gdbm_compat')
-                        if find_file("gdbm/ndbm.h", inc_dirs, []) is not None:
-                            print "building dbm using gdbm"
-                            dbmext = Extension(
-                                'dbm', ['dbmmodule.c'],
-                                define_macros=[
-                                    ('HAVE_GDBM_NDBM_H', None),
-                                    ],
-                                libraries = gdbm_libs)
-                            break
-                        if find_file("gdbm-ndbm.h", inc_dirs, []) is not None:
-                            print "building dbm using gdbm"
-                            dbmext = Extension(
-                                'dbm', ['dbmmodule.c'],
-                                define_macros=[
-                                    ('HAVE_GDBM_DASH_NDBM_H', None),
-                                    ],
-                                libraries = gdbm_libs)
-                            break
-                elif cand == "bdb":
-                    if db_incs is not None:
-                        print "building dbm using bdb"
-                        dbmext = Extension('dbm', ['dbmmodule.c'],
-                                           library_dirs=dblib_dir,
-                                           runtime_library_dirs=dblib_dir,
-                                           include_dirs=db_incs,
-                                           define_macros=[
-                                               ('HAVE_BERKDB_H', None),
-                                               ('DB_DBM_HSEARCH', None),
-                                               ],
-                                           libraries=dblibs)
-                        break
-            if dbmext is not None:
-                exts.append(dbmext)
-            else:
-                missing.append('dbm')
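-        # Example: a hypothetical ./configure --with-dbmliborder=gdbm:ndbm
-        # makes the candidate loop above try gdbm first, then ndbm, and
-        # skip bdb entirely:
-        ##  dbm_args = ['--with-dbmliborder=gdbm:ndbm']
-        ##  dbm_order = [arg.split('=')[-1] for arg in dbm_args][-1].split(":")
-        ##  # dbm_order == ['gdbm', 'ndbm']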
-
-        # Anthony Baxter's gdbm module.  GNU dbm(3) will require -lgdbm:
-        if ('gdbm' in dbm_order and
-            self.compiler.find_library_file(lib_dirs, 'gdbm')):
-            exts.append( Extension('gdbm', ['gdbmmodule.c'],
-                                   libraries = ['gdbm'] ) )
-        else:
-            missing.append('gdbm')
-
-        # Unix-only modules
-        if host_platform not in ['win32']:
-            # Steen Lumholt's termios module
-            exts.append( Extension('termios', ['termios.c']) )
-            # Jeremy Hylton's rlimit interface
-            if host_platform not in ['atheos']:
-                exts.append( Extension('resource', ['resource.c']) )
-            else:
-                missing.append('resource')
-
-            # Sun yellow pages. Some systems have the functions in libc.
-            if (host_platform not in ['cygwin', 'atheos', 'qnx6'] and
-                find_file('rpcsvc/yp_prot.h', inc_dirs, []) is not None):
-                if (self.compiler.find_library_file(lib_dirs, 'nsl')):
-                    libs = ['nsl']
-                else:
-                    libs = []
-                exts.append( Extension('nis', ['nismodule.c'],
-                                       libraries = libs) )
-            else:
-                missing.append('nis')
-        else:
-            missing.extend(['nis', 'resource', 'termios'])
-
-        # Curses support, requiring the System V version of curses, often
-        # provided by the ncurses library.
-        panel_library = 'panel'
-        if curses_library.startswith('ncurses'):
-            if curses_library == 'ncursesw':
-                # Bug 1464056: If _curses.so links with ncursesw,
-                # _curses_panel.so must link with panelw.
-                panel_library = 'panelw'
-            curses_libs = [curses_library]
-            exts.append( Extension('_curses', ['_cursesmodule.c'],
-                                   libraries = curses_libs) )
-        elif curses_library == 'curses' and host_platform != 'darwin':
-            # OSX has an old Berkeley curses, not good enough for
-            # the _curses module.
-            if (self.compiler.find_library_file(lib_dirs, 'terminfo')):
-                curses_libs = ['curses', 'terminfo']
-            elif (self.compiler.find_library_file(lib_dirs, 'termcap')):
-                curses_libs = ['curses', 'termcap']
-            else:
-                curses_libs = ['curses']
-
-            exts.append( Extension('_curses', ['_cursesmodule.c'],
-                                   libraries = curses_libs) )
-        else:
-            missing.append('_curses')
-
-        # If the curses module is enabled, check for the panel module
-        if (module_enabled(exts, '_curses') and
-            self.compiler.find_library_file(lib_dirs, panel_library)):
-            exts.append( Extension('_curses_panel', ['_curses_panel.c'],
-                                   libraries = [panel_library] + curses_libs) )
-        else:
-            missing.append('_curses_panel')
-
-        # Andrew Kuchling's zlib module.  Note that some versions of zlib
-        # 1.1.3 have security problems.  See CERT Advisory CA-2002-07:
-        # http://www.cert.org/advisories/CA-2002-07.html
-        #
-        # zlib 1.1.4 is fixed, but at least one vendor (RedHat) has decided to
-        # patch its zlib 1.1.3 package instead of upgrading to 1.1.4.  For
-        # now, we still accept 1.1.3, because we think it's difficult to
-        # exploit this in Python, and we'd rather make it RedHat's problem
-        # than our problem <wink>.
-        #
-        # You can upgrade zlib to version 1.1.4 yourself by going to
-        # http://www.gzip.org/zlib/
-        zlib_inc = find_file('zlib.h', [], inc_dirs)
-        have_zlib = False
-        if zlib_inc is not None:
-            zlib_h = zlib_inc[0] + '/zlib.h'
-            version = '"0.0.0"'
-            version_req = '"1.1.3"'
-            fp = open(zlib_h)
-            while 1:
-                line = fp.readline()
-                if not line:
-                    break
-                if line.startswith('#define ZLIB_VERSION'):
-                    version = line.split()[2]
-                    break
-            if version >= version_req:
-                if (self.compiler.find_library_file(lib_dirs, 'z')):
-                    if host_platform == "darwin":
-                        zlib_extra_link_args = ('-Wl,-search_paths_first',)
-                    else:
-                        zlib_extra_link_args = ()
-                    exts.append( Extension('zlib', ['zlibmodule.c'],
-                                           libraries = ['z'],
-                                           extra_link_args = zlib_extra_link_args))
-                    have_zlib = True
-                else:
-                    missing.append('zlib')
-            else:
-                missing.append('zlib')
-        else:
-            missing.append('zlib')
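-        # Note: the version check above compares the quoted strings
-        # lexicographically, which can misorder dotted versions; a numeric
-        # comparison is the safer general form (version strings hypothetical):
-        ##  as_tuple = lambda v: tuple(map(int, v.strip('"').split('.')))
-        ##  '"1.1.10"' >= '"1.1.3"'                      # False as strings
-        ##  as_tuple('"1.1.10"') >= as_tuple('"1.1.3"')  # True numerically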
-
-        # Helper module for various ascii-encoders.  Uses zlib for an optimized
-        # crc32 if we have it.  Otherwise binascii uses its own.
-        if have_zlib:
-            extra_compile_args = ['-DUSE_ZLIB_CRC32']
-            libraries = ['z']
-            extra_link_args = zlib_extra_link_args
-        else:
-            extra_compile_args = []
-            libraries = []
-            extra_link_args = []
-        exts.append( Extension('binascii', ['binascii.c'],
-                               extra_compile_args = extra_compile_args,
-                               libraries = libraries,
-                               extra_link_args = extra_link_args) )
-
-        # Gustavo Niemeyer's bz2 module.
-        if (self.compiler.find_library_file(lib_dirs, 'bz2')):
-            if host_platform == "darwin":
-                bz2_extra_link_args = ('-Wl,-search_paths_first',)
-            else:
-                bz2_extra_link_args = ()
-            exts.append( Extension('bz2', ['bz2module.c'],
-                                   libraries = ['bz2'],
-                                   extra_link_args = bz2_extra_link_args) )
-        else:
-            missing.append('bz2')
-
-        # Interface to the Expat XML parser
-        #
-        # Expat was written by James Clark and is now maintained by a group of
-        # developers on SourceForge; see www.libexpat.org for more information.
-        # The pyexpat module was written by Paul Prescod after a prototype by
-        # Jack Jansen.  The Expat source is included in Modules/expat/.  Usage
-        # of a system shared libexpat.so is possible with --with-system-expat
-        # configure option.
-        #
-        # More information on Expat can be found at www.libexpat.org.
-        #
-        if '--with-system-expat' in sysconfig.get_config_var("CONFIG_ARGS"):
-            expat_inc = []
-            define_macros = []
-            expat_lib = ['expat']
-            expat_sources = []
-            expat_depends = []
-        else:
-            expat_inc = [os.path.join(os.getcwd(), srcdir, 'Modules', 'expat')]
-            define_macros = [
-                ('HAVE_EXPAT_CONFIG_H', '1'),
-            ]
-            expat_lib = []
-            expat_sources = ['expat/xmlparse.c',
-                             'expat/xmlrole.c',
-                             'expat/xmltok.c']
-            expat_depends = ['expat/ascii.h',
-                             'expat/asciitab.h',
-                             'expat/expat.h',
-                             'expat/expat_config.h',
-                             'expat/expat_external.h',
-                             'expat/internal.h',
-                             'expat/latin1tab.h',
-                             'expat/utf8tab.h',
-                             'expat/xmlrole.h',
-                             'expat/xmltok.h',
-                             'expat/xmltok_impl.h'
-                             ]
-
-        exts.append(Extension('pyexpat',
-                              define_macros = define_macros,
-                              include_dirs = expat_inc,
-                              libraries = expat_lib,
-                              sources = ['pyexpat.c'] + expat_sources,
-                              depends = expat_depends,
-                              ))
-
-        # Fredrik Lundh's cElementTree module.  Note that this also
-        # uses expat (via the CAPI hook in pyexpat).
-
-        if os.path.isfile(os.path.join(srcdir, 'Modules', '_elementtree.c')):
-            define_macros.append(('USE_PYEXPAT_CAPI', None))
-            exts.append(Extension('_elementtree',
-                                  define_macros = define_macros,
-                                  include_dirs = expat_inc,
-                                  libraries = expat_lib,
-                                  sources = ['_elementtree.c'],
-                                  depends = ['pyexpat.c'] + expat_sources +
-                                      expat_depends,
-                                  ))
-        else:
-            missing.append('_elementtree')
-
-        # Hye-Shik Chang's CJKCodecs modules.
-        if have_unicode:
-            exts.append(Extension('_multibytecodec',
-                                  ['cjkcodecs/multibytecodec.c']))
-            for loc in ('kr', 'jp', 'cn', 'tw', 'hk', 'iso2022'):
-                exts.append(Extension('_codecs_%s' % loc,
-                                      ['cjkcodecs/_codecs_%s.c' % loc]))
-        else:
-            missing.append('_multibytecodec')
-            for loc in ('kr', 'jp', 'cn', 'tw', 'hk', 'iso2022'):
-                missing.append('_codecs_%s' % loc)
-
-        # Dynamic loading module
-        if sys.maxint == 0x7fffffff:
-            # This requires sizeof(int) == sizeof(long) == sizeof(char*)
-            dl_inc = find_file('dlfcn.h', [], inc_dirs)
-            if (dl_inc is not None) and (host_platform not in ['atheos']):
-                exts.append( Extension('dl', ['dlmodule.c']) )
-            else:
-                missing.append('dl')
-        else:
-            missing.append('dl')
-
-        # Thomas Heller's _ctypes module
-        self.detect_ctypes(inc_dirs, lib_dirs)
-
-        # Richard Oudkerk's multiprocessing module
-        if host_platform == 'win32':             # Windows
-            macros = dict()
-            libraries = ['ws2_32']
-
-        elif host_platform == 'darwin':          # Mac OSX
-            macros = dict()
-            libraries = []
-
-        elif host_platform == 'cygwin':          # Cygwin
-            macros = dict()
-            libraries = []
-
-        elif host_platform in ('freebsd4', 'freebsd5', 'freebsd6', 'freebsd7', 'freebsd8'):
-            # FreeBSD's P1003.1b semaphore support is very experimental
-            # and has many known problems. (as of June 2008)
-            macros = dict()
-            libraries = []
-
-        elif host_platform.startswith('openbsd'):
-            macros = dict()
-            libraries = []
-
-        elif host_platform.startswith('netbsd'):
-            macros = dict()
-            libraries = []
-
-        else:                                   # Linux and other unices
-            macros = dict()
-            libraries = ['rt']
-
-        if host_platform == 'win32':
-            multiprocessing_srcs = [ '_multiprocessing/multiprocessing.c',
-                                     '_multiprocessing/semaphore.c',
-                                     '_multiprocessing/pipe_connection.c',
-                                     '_multiprocessing/socket_connection.c',
-                                     '_multiprocessing/win32_functions.c'
-                                   ]
-
-        else:
-            multiprocessing_srcs = [ '_multiprocessing/multiprocessing.c',
-                                     '_multiprocessing/socket_connection.c'
-                                   ]
-            if (sysconfig.get_config_var('HAVE_SEM_OPEN') and not
-                sysconfig.get_config_var('POSIX_SEMAPHORES_NOT_ENABLED')):
-                multiprocessing_srcs.append('_multiprocessing/semaphore.c')
-
-        if sysconfig.get_config_var('WITH_THREAD'):
-            exts.append ( Extension('_multiprocessing', multiprocessing_srcs,
-                                    define_macros=macros.items(),
-                                    include_dirs=["Modules/_multiprocessing"]))
-        else:
-            missing.append('_multiprocessing')
-
-        # End multiprocessing
-
-
-        # Platform-specific libraries
-        if host_platform == 'linux2':
-            # Linux-specific modules
-            exts.append( Extension('linuxaudiodev', ['linuxaudiodev.c']) )
-        else:
-            missing.append('linuxaudiodev')
-
-        if (host_platform in ('linux2', 'freebsd4', 'freebsd5', 'freebsd6',
-                        'freebsd7', 'freebsd8')
-            or host_platform.startswith("gnukfreebsd")):
-            exts.append( Extension('ossaudiodev', ['ossaudiodev.c']) )
-        else:
-            missing.append('ossaudiodev')
-
-        if host_platform == 'sunos5':
-            # SunOS specific modules
-            exts.append( Extension('sunaudiodev', ['sunaudiodev.c']) )
-        else:
-            missing.append('sunaudiodev')
-
-        if host_platform == 'darwin':
-            # _scproxy
-            exts.append(Extension("_scproxy", [os.path.join(srcdir, "Mac/Modules/_scproxy.c")],
-                extra_link_args= [
-                    '-framework', 'SystemConfiguration',
-                    '-framework', 'CoreFoundation'
-                ]))
-
-
-        if host_platform == 'darwin' and ("--disable-toolbox-glue" not in
-                sysconfig.get_config_var("CONFIG_ARGS")):
-
-            if int(os.uname()[2].split('.')[0]) >= 8:
-                # We're on Mac OS X 10.4 or later, the compiler should
-                # support '-Wno-deprecated-declarations'. This will
-                # suppress deprecation warnings for the Carbon extensions,
-                # which wrap the Carbon APIs, including the parts that
-                # are deprecated.
-                carbon_extra_compile_args = ['-Wno-deprecated-declarations']
-            else:
-                carbon_extra_compile_args = []
-
-            # Mac OS X specific modules.
-            def macSrcExists(name1, name2=''):
-                if not name1:
-                    return None
-                names = (name1,)
-                if name2:
-                    names = (name1, name2)
-                path = os.path.join(srcdir, 'Mac', 'Modules', *names)
-                return os.path.exists(path)
-
-            def addMacExtension(name, kwds, extra_srcs=[]):
-                dirname = ''
-                if name[0] == '_':
-                    dirname = name[1:].lower()
-                cname = name + '.c'
-                cmodulename = name + 'module.c'
-                # Check for NNN.c, NNNmodule.c, _nnn/NNN.c, _nnn/NNNmodule.c
-                if macSrcExists(cname):
-                    srcs = [cname]
-                elif macSrcExists(cmodulename):
-                    srcs = [cmodulename]
-                elif macSrcExists(dirname, cname):
-                    # XXX(nnorwitz): If all the names ended with module, we
-                    # wouldn't need this condition.  ibcarbon is the only one.
-                    srcs = [os.path.join(dirname, cname)]
-                elif macSrcExists(dirname, cmodulename):
-                    srcs = [os.path.join(dirname, cmodulename)]
-                else:
-                    raise RuntimeError("%s not found" % name)
-
-                # Here's the whole point:  add the extension with sources
-                exts.append(Extension(name, srcs + extra_srcs, **kwds))
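-            # Name resolution sketch: for a hypothetical module '_Foo' the
-            # helper above looks under Mac/Modules for, in order:
-            ##  _Foo.c
-            ##  _Foomodule.c
-            ##  foo/_Foo.c
-            ##  foo/_Foomodule.c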
-
-            # Core Foundation
-            core_kwds = {'extra_compile_args': carbon_extra_compile_args,
-                         'extra_link_args': ['-framework', 'CoreFoundation'],
-                        }
-            addMacExtension('_CF', core_kwds, ['cf/pycfbridge.c'])
-            addMacExtension('autoGIL', core_kwds)
-
-
-
-            # Carbon
-            carbon_kwds = {'extra_compile_args': carbon_extra_compile_args,
-                           'extra_link_args': ['-framework', 'Carbon'],
-                          }
-            CARBON_EXTS = ['ColorPicker', 'gestalt', 'MacOS', 'Nav',
-                           'OSATerminology', 'icglue',
-                           # All these are in subdirs
-                           '_AE', '_AH', '_App', '_CarbonEvt', '_Cm', '_Ctl',
-                           '_Dlg', '_Drag', '_Evt', '_File', '_Folder', '_Fm',
-                           '_Help', '_Icn', '_IBCarbon', '_List',
-                           '_Menu', '_Mlte', '_OSA', '_Res', '_Qd', '_Qdoffs',
-                           '_Scrap', '_Snd', '_TE',
-                          ]
-            for name in CARBON_EXTS:
-                addMacExtension(name, carbon_kwds)
-
-            # Workaround for a bug in the version of gcc shipped with Xcode 3.
-            # The _Win extension should build just like the other Carbon extensions, but
-            # this actually results in a hard crash of the linker.
-            #
-            if '-arch ppc64' in cflags and '-arch ppc' in cflags:
-                win_kwds = {'extra_compile_args': carbon_extra_compile_args + ['-arch', 'i386', '-arch', 'ppc'],
-                               'extra_link_args': ['-framework', 'Carbon', '-arch', 'i386', '-arch', 'ppc'],
-                           }
-                addMacExtension('_Win', win_kwds)
-            else:
-                addMacExtension('_Win', carbon_kwds)
-
-
-            # Application Services & QuickTime
-            app_kwds = {'extra_compile_args': carbon_extra_compile_args,
-                        'extra_link_args': ['-framework','ApplicationServices'],
-                       }
-            addMacExtension('_Launch', app_kwds)
-            addMacExtension('_CG', app_kwds)
-
-            exts.append( Extension('_Qt', ['qt/_Qtmodule.c'],
-                        extra_compile_args=carbon_extra_compile_args,
-                        extra_link_args=['-framework', 'QuickTime',
-                                     '-framework', 'Carbon']) )
-
-
-        self.extensions.extend(exts)
-
-        # Call the method for detecting whether _tkinter can be compiled
-        self.detect_tkinter(inc_dirs, lib_dirs)
-
-        if '_tkinter' not in [e.name for e in self.extensions]:
-            missing.append('_tkinter')
-
-        return missing
-
-    def detect_tkinter_darwin(self, inc_dirs, lib_dirs):
-        # The _tkinter module, using frameworks. Since frameworks are quite
-        # different the UNIX search logic is not sharable.
-        from os.path import join, exists
-        framework_dirs = [
-            '/Library/Frameworks',
-            '/System/Library/Frameworks/',
-            join(os.getenv('HOME'), 'Library/Frameworks')
-        ]
-
-        sysroot = macosx_sdk_root()
-
-        # Find the directory that contains the Tcl.framework and Tk.framework
-        # bundles.
-        # XXX distutils should support -F!
-        for F in framework_dirs:
-            # both Tcl.framework and Tk.framework should be present
-
-
-            for fw in 'Tcl', 'Tk':
-                if is_macosx_sdk_path(F):
-                    if not exists(join(sysroot, F[1:], fw + '.framework')):
-                        break
-                else:
-                    if not exists(join(F, fw + '.framework')):
-                        break
-            else:
-                # OK, F is now a directory containing both frameworks.
-                # Continue building.
-                break
-        else:
-            # Tk and Tcl frameworks not found. Normal "unix" tkinter search
-            # will now resume.
-            return 0
-
-        # For 8.4a2, we must add -I options that point inside the Tcl and Tk
-        # frameworks. In later release we should hopefully be able to pass
-        # the -F option to gcc, which specifies a framework lookup path.
-        #
-        include_dirs = [
-            join(F, fw + '.framework', H)
-            for fw in 'Tcl', 'Tk'
-            for H in 'Headers', 'Versions/Current/PrivateHeaders'
-        ]
-
-        # For 8.4a2, the X11 headers are not included. Rather than include a
-        # complicated search, this is a hard-coded path. It could bail out
-        # if X11 libs are not found...
-        include_dirs.append('/usr/X11R6/include')
-        frameworks = ['-framework', 'Tcl', '-framework', 'Tk']
-
-        # No existing framework builds of Tcl/Tk support 64-bit
-        # architectures.
-        cflags = sysconfig.get_config_vars('CFLAGS')[0]
-        archs = re.findall(r'-arch\s+(\w+)', cflags)
-
-        if is_macosx_sdk_path(F):
-            fp = os.popen("file %s/Tk.framework/Tk | grep 'for architecture'"%(os.path.join(sysroot, F[1:]),))
-        else:
-            fp = os.popen("file %s/Tk.framework/Tk | grep 'for architecture'"%(F,))
-
-        detected_archs = []
-        for ln in fp:
-            a = ln.split()[-1]
-            if a in archs:
-                detected_archs.append(ln.split()[-1])
-        fp.close()
-
-        for a in detected_archs:
-            frameworks.append('-arch')
-            frameworks.append(a)
-
-        ext = Extension('_tkinter', ['_tkinter.c', 'tkappinit.c'],
-                        define_macros=[('WITH_APPINIT', 1)],
-                        include_dirs = include_dirs,
-                        libraries = [],
-                        extra_compile_args = frameworks[2:],
-                        extra_link_args = frameworks,
-                        )
-        self.extensions.append(ext)
-        return 1
-
-
-    def detect_tkinter(self, inc_dirs, lib_dirs):
-        # The _tkinter module.
-
-        # Rather than complicate the code below, detecting and building
-        # AquaTk is a separate method. Only one Tkinter will be built on
-        # Darwin - either AquaTk, if it is found, or X11 based Tk.
-        if (host_platform == 'darwin' and
-            self.detect_tkinter_darwin(inc_dirs, lib_dirs)):
-            return
-
-        # Assume we haven't found any of the libraries or include files
-        # The versions with dots are used on Unix, and the versions without
-        # dots on Windows, for detection by cygwin.
-        tcllib = tklib = tcl_includes = tk_includes = None
-        for version in ['8.6', '86', '8.5', '85', '8.4', '84', '8.3', '83',
-                        '8.2', '82', '8.1', '81', '8.0', '80']:
-            tklib = self.compiler.find_library_file(lib_dirs,
-                                                        'tk' + version)
-            tcllib = self.compiler.find_library_file(lib_dirs,
-                                                         'tcl' + version)
-            if tklib and tcllib:
-                # Exit the loop when we've found the Tcl/Tk libraries
-                break
-
-        # Now check for the header files
-        if tklib and tcllib:
-            # Check for the include files on Debian and {Free,Open}BSD, where
-            # they're put in /usr/include/{tcl,tk}X.Y
-            dotversion = version
-            if '.' not in dotversion and "bsd" in host_platform.lower():
-                # OpenBSD and FreeBSD use Tcl/Tk library names like libtcl83.a,
-                # but the include subdirs are named like .../include/tcl8.3.
-                dotversion = dotversion[:-1] + '.' + dotversion[-1]
-            tcl_include_sub = []
-            tk_include_sub = []
-            for dir in inc_dirs:
-                tcl_include_sub += [dir + os.sep + "tcl" + dotversion]
-                tk_include_sub += [dir + os.sep + "tk" + dotversion]
-            tk_include_sub += tcl_include_sub
-            tcl_includes = find_file('tcl.h', inc_dirs, tcl_include_sub)
-            tk_includes = find_file('tk.h', inc_dirs, tk_include_sub)
-
-        if (tcllib is None or tklib is None or
-            tcl_includes is None or tk_includes is None):
-            self.announce("INFO: Can't locate Tcl/Tk libs and/or headers", 2)
-            return
-
-        # OK... everything seems to be present for Tcl/Tk.
-
-        include_dirs = [] ; libs = [] ; defs = [] ; added_lib_dirs = []
-        for dir in tcl_includes + tk_includes:
-            if dir not in include_dirs:
-                include_dirs.append(dir)
-
-        # Check for various platform-specific directories
-        if host_platform == 'sunos5':
-            include_dirs.append('/usr/openwin/include')
-            added_lib_dirs.append('/usr/openwin/lib')
-        elif os.path.exists('/usr/X11R6/include'):
-            include_dirs.append('/usr/X11R6/include')
-            added_lib_dirs.append('/usr/X11R6/lib64')
-            added_lib_dirs.append('/usr/X11R6/lib')
-        elif os.path.exists('/usr/X11R5/include'):
-            include_dirs.append('/usr/X11R5/include')
-            added_lib_dirs.append('/usr/X11R5/lib')
-        else:
-            # Assume default location for X11
-            include_dirs.append('/usr/X11/include')
-            added_lib_dirs.append('/usr/X11/lib')
-
-        # If Cygwin, then verify that X is installed before proceeding
-        if host_platform == 'cygwin':
-            x11_inc = find_file('X11/Xlib.h', [], include_dirs)
-            if x11_inc is None:
-                return
-
-        # Check for BLT extension
-        if self.compiler.find_library_file(lib_dirs + added_lib_dirs,
-                                               'BLT8.0'):
-            defs.append( ('WITH_BLT', 1) )
-            libs.append('BLT8.0')
-        elif self.compiler.find_library_file(lib_dirs + added_lib_dirs,
-                                                'BLT'):
-            defs.append( ('WITH_BLT', 1) )
-            libs.append('BLT')
-
-        # Add the Tcl/Tk libraries
-        libs.append('tk'+ version)
-        libs.append('tcl'+ version)
-
-        if host_platform in ['aix3', 'aix4']:
-            libs.append('ld')
-
-        # Finally, link with the X11 libraries (not appropriate on cygwin)
-        if host_platform != "cygwin":
-            libs.append('X11')
-
-        ext = Extension('_tkinter', ['_tkinter.c', 'tkappinit.c'],
-                        define_macros=[('WITH_APPINIT', 1)] + defs,
-                        include_dirs = include_dirs,
-                        libraries = libs,
-                        library_dirs = added_lib_dirs,
-                        )
-        self.extensions.append(ext)
-
-##         # Uncomment these lines if you want to play with xxmodule.c
-##         ext = Extension('xx', ['xxmodule.c'])
-##         self.extensions.append(ext)
-
-        # XXX handle these, but how to detect?
-        # *** Uncomment and edit for PIL (TkImaging) extension only:
-        #       -DWITH_PIL -I../Extensions/Imaging/libImaging  tkImaging.c \
-        # *** Uncomment and edit for TOGL extension only:
-        #       -DWITH_TOGL togl.c \
-        # *** Uncomment these for TOGL extension only:
-        #       -lGL -lGLU -lXext -lXmu \
-
-    def configure_ctypes_darwin(self, ext):
-        # Darwin (OS X) uses preconfigured files, in
-        # the Modules/_ctypes/libffi_osx directory.
-        srcdir = sysconfig.get_config_var('srcdir')
-        ffi_srcdir = os.path.abspath(os.path.join(srcdir, 'Modules',
-                                                  '_ctypes', 'libffi_osx'))
-        sources = [os.path.join(ffi_srcdir, p)
-                   for p in ['ffi.c',
-                             'x86/darwin64.S',
-                             'x86/x86-darwin.S',
-                             'x86/x86-ffi_darwin.c',
-                             'x86/x86-ffi64.c',
-                             'powerpc/ppc-darwin.S',
-                             'powerpc/ppc-darwin_closure.S',
-                             'powerpc/ppc-ffi_darwin.c',
-                             'powerpc/ppc64-darwin_closure.S',
-                             ]]
-
-        # Add .S (preprocessed assembly) to C compiler source extensions.
-        self.compiler.src_extensions.append('.S')
-
-        include_dirs = [os.path.join(ffi_srcdir, 'include'),
-                        os.path.join(ffi_srcdir, 'powerpc')]
-        ext.include_dirs.extend(include_dirs)
-        ext.sources.extend(sources)
-        return True
-
-    def configure_ctypes(self, ext):
-        if not self.use_system_libffi:
-            if host_platform == 'darwin':
-                return self.configure_ctypes_darwin(ext)
-
-            srcdir = sysconfig.get_config_var('srcdir')
-            ffi_builddir = os.path.join(self.build_temp, 'libffi')
-            ffi_srcdir = os.path.abspath(os.path.join(srcdir, 'Modules',
-                                         '_ctypes', 'libffi'))
-            ffi_configfile = os.path.join(ffi_builddir, 'fficonfig.py')
-
-            from distutils.dep_util import newer_group
-
-            config_sources = [os.path.join(ffi_srcdir, fname)
-                              for fname in os.listdir(ffi_srcdir)
-                              if os.path.isfile(os.path.join(ffi_srcdir, fname))]
-            if self.force or newer_group(config_sources,
-                                         ffi_configfile):
-                from distutils.dir_util import mkpath
-                mkpath(ffi_builddir)
-                config_args = [arg for arg in sysconfig.get_config_var("CONFIG_ARGS").split()
-                               if (('--host=' in arg) or ('--build=' in arg))]
-                if not self.verbose:
-                    config_args.append("-q")
-
-                # Pass empty CFLAGS because we'll just append the resulting
-                # CFLAGS to Python's; -g or -O2 is to be avoided.
-                cmd = "cd %s && env CFLAGS='' '%s/configure' %s" \
-                      % (ffi_builddir, ffi_srcdir, " ".join(config_args))
-
-                res = os.system(cmd)
-                if res or not os.path.exists(ffi_configfile):
-                    print "Failed to configure _ctypes module"
-                    return False
-
-            fficonfig = {}
-            with open(ffi_configfile) as f:
-                exec f in fficonfig
-
-            # Add .S (preprocessed assembly) to C compiler source extensions.
-            self.compiler.src_extensions.append('.S')
-
-            include_dirs = [os.path.join(ffi_builddir, 'include'),
-                            ffi_builddir,
-                            os.path.join(ffi_srcdir, 'src')]
-            extra_compile_args = fficonfig['ffi_cflags'].split()
-
-            ext.sources.extend(os.path.join(ffi_srcdir, f) for f in
-                               fficonfig['ffi_sources'])
-            ext.include_dirs.extend(include_dirs)
-            ext.extra_compile_args.extend(extra_compile_args)
-        return True
-
-    def detect_ctypes(self, inc_dirs, lib_dirs):
-        self.use_system_libffi = False
-        include_dirs = []
-        extra_compile_args = []
-        extra_link_args = []
-        sources = ['_ctypes/_ctypes.c',
-                   '_ctypes/callbacks.c',
-                   '_ctypes/callproc.c',
-                   '_ctypes/stgdict.c',
-                   '_ctypes/cfield.c']
-        depends = ['_ctypes/ctypes.h']
-
-        if host_platform == 'darwin':
-            sources.append('_ctypes/malloc_closure.c')
-            sources.append('_ctypes/darwin/dlfcn_simple.c')
-            extra_compile_args.append('-DMACOSX')
-            include_dirs.append('_ctypes/darwin')
-# XXX Is this still needed?
-##            extra_link_args.extend(['-read_only_relocs', 'warning'])
-
-        elif host_platform == 'sunos5':
-            # XXX This shouldn't be necessary; it appears that some
-            # of the assembler code is non-PIC (i.e. it has relocations
-            # when it shouldn't). The proper fix would be to rewrite
-            # the assembler code to be PIC.
-            # This only works with GCC; the Sun compiler likely refuses
-            # this option. If you want to compile ctypes with the Sun
-            # compiler, please research a proper solution, instead of
-            # finding some -z option for the Sun compiler.
-            extra_link_args.append('-mimpure-text')
-
-        elif host_platform.startswith('hp-ux'):
-            extra_link_args.append('-fPIC')
-
-        ext = Extension('_ctypes',
-                        include_dirs=include_dirs,
-                        extra_compile_args=extra_compile_args,
-                        extra_link_args=extra_link_args,
-                        libraries=[],
-                        sources=sources,
-                        depends=depends)
-        ext_test = Extension('_ctypes_test',
-                             sources=['_ctypes/_ctypes_test.c'])
-        self.extensions.extend([ext, ext_test])
-
-        if '--with-system-ffi' not in sysconfig.get_config_var("CONFIG_ARGS"):
-            return
-
-        if host_platform == 'darwin':
-            # OS X 10.5 comes with libffi.dylib; the include files are
-            # in /usr/include/ffi
-            inc_dirs.append('/usr/include/ffi')
-
-        ffi_inc = [sysconfig.get_config_var("LIBFFI_INCLUDEDIR")]
-        if not ffi_inc or ffi_inc[0] == '':
-            ffi_inc = find_file('ffi.h', [], inc_dirs)
-        if ffi_inc is not None:
-            ffi_h = ffi_inc[0] + '/ffi.h'
-            fp = open(ffi_h)
-            while 1:
-                line = fp.readline()
-                if not line:
-                    ffi_inc = None
-                    break
-                if line.startswith('#define LIBFFI_H'):
-                    break
-        ffi_lib = None
-        if ffi_inc is not None:
-            for lib_name in ('ffi_convenience', 'ffi_pic', 'ffi'):
-                if (self.compiler.find_library_file(lib_dirs, lib_name)):
-                    ffi_lib = lib_name
-                    break
-
-        if ffi_inc and ffi_lib:
-            ext.include_dirs.extend(ffi_inc)
-            ext.libraries.append(ffi_lib)
-            self.use_system_libffi = True
-
-
-class PyBuildInstall(install):
-    # Suppress the warning about installation into the lib_dynload
-    # directory, which is not in sys.path when running Python during
-    # installation:
-    def initialize_options (self):
-        install.initialize_options(self)
-        self.warn_dir=0
-
-class PyBuildInstallLib(install_lib):
-    # Do exactly what install_lib does but make sure correct access modes get
-    # set on installed directories and files. All installed files will get
-    # mode 644 unless they are a shared library, in which case they will get
-    # mode 755. All installed directories will get mode 755.
-
-    so_ext = sysconfig.get_config_var("SO")
-
-    def install(self):
-        outfiles = install_lib.install(self)
-        self.set_file_modes(outfiles, 0644, 0755)
-        self.set_dir_modes(self.install_dir, 0755)
-        return outfiles
-
-    def set_file_modes(self, files, defaultMode, sharedLibMode):
-        if not self.is_chmod_supported(): return
-        if not files: return
-
-        for filename in files:
-            if os.path.islink(filename): continue
-            mode = defaultMode
-            if filename.endswith(self.so_ext): mode = sharedLibMode
-            log.info("changing mode of %s to %o", filename, mode)
-            if not self.dry_run: os.chmod(filename, mode)
-
-    def set_dir_modes(self, dirname, mode):
-        if not self.is_chmod_supported(): return
-        os.path.walk(dirname, self.set_dir_modes_visitor, mode)
-
-    def set_dir_modes_visitor(self, mode, dirname, names):
-        if os.path.islink(dirname): return
-        log.info("changing mode of %s to %o", dirname, mode)
-        if not self.dry_run: os.chmod(dirname, mode)
-
-    def is_chmod_supported(self):
-        return hasattr(os, 'chmod')
-
-SUMMARY = """
-Python is an interpreted, interactive, object-oriented programming
-language. It is often compared to Tcl, Perl, Scheme or Java.
-
-Python combines remarkable power with very clear syntax. It has
-modules, classes, exceptions, very high level dynamic data types, and
-dynamic typing. There are interfaces to many system calls and
-libraries, as well as to various windowing systems (X11, Motif, Tk,
-Mac, MFC). New built-in modules are easily written in C or C++. Python
-is also usable as an extension language for applications that need a
-programmable interface.
-
-The Python implementation is portable: it runs on many brands of UNIX,
-on Windows, DOS, OS/2, Mac, Amiga... If your favorite system isn't
-listed here, it may still be supported if there's a C compiler for
-it. Ask around on comp.lang.python -- or just try compiling Python
-yourself.
-"""
-
-CLASSIFIERS = """
-Development Status :: 6 - Mature
-License :: OSI Approved :: Python Software Foundation License
-Natural Language :: English
-Programming Language :: C
-Programming Language :: Python
-Topic :: Software Development
-"""
-
-def main():
-    # turn off warnings when deprecated modules are imported
-    import warnings
-    warnings.filterwarnings("ignore",category=DeprecationWarning)
-    setup(# PyPI Metadata (PEP 301)
-          name = "Python",
-          version = sys.version.split()[0],
-          url = "http://www.python.org/%s" % sys.version[:3],
-          maintainer = "Guido van Rossum and the Python community",
-          maintainer_email = "python-dev@python.org",
-          description = "A high-level object-oriented programming language",
-          long_description = SUMMARY.strip(),
-          license = "PSF license",
-          classifiers = filter(None, CLASSIFIERS.split("\n")),
-          platforms = ["Many"],
-
-          # Build info
-          cmdclass = {'build_ext':PyBuildExt, 'install':PyBuildInstall,
-                      'install_lib':PyBuildInstallLib},
-          # The struct module is defined here, because build_ext won't be
-          # called unless there's at least one extension module defined.
-          ext_modules=[Extension('_struct', ['_struct.c'])],
-
-          # Scripts to install
-          scripts = ['Tools/scripts/pydoc', 'Tools/scripts/idle',
-                     'Tools/scripts/2to3',
-                     'Lib/smtpd.py']
-        )
-
-# --install-platlib
-if __name__ == '__main__':
-    main()
diff --git a/pysrc/src/setup-2.7.7.py b/pysrc/src/setup-2.7.7.py
deleted file mode 100644
index 9a92bc3a7925bd990fd46d6b10868ebaad309e8e..0000000000000000000000000000000000000000
--- a/pysrc/src/setup-2.7.7.py
+++ /dev/null
@@ -1,2244 +0,0 @@
-# Autodetecting setup.py script for building the Python extensions
-#
-
-__version__ = "$Revision$"
-
-import sys, os, imp, re, optparse
-from glob import glob
-from platform import machine as platform_machine
-import sysconfig
-
-from distutils import log
-from distutils import text_file
-from distutils.errors import *
-from distutils.core import Extension, setup
-from distutils.command.build_ext import build_ext
-from distutils.command.install import install
-from distutils.command.install_lib import install_lib
-from distutils.spawn import find_executable
-
-cross_compiling = "_PYTHON_HOST_PLATFORM" in os.environ
-
-def get_platform():
-    # cross build
-    if "_PYTHON_HOST_PLATFORM" in os.environ:
-        return os.environ["_PYTHON_HOST_PLATFORM"]
-    # Get value of sys.platform
-    if sys.platform.startswith('osf1'):
-        return 'osf1'
-    return sys.platform
-host_platform = get_platform()
-
-# Were we compiled --with-pydebug or with #define Py_DEBUG?
-COMPILED_WITH_PYDEBUG = ('--with-pydebug' in sysconfig.get_config_var("CONFIG_ARGS"))
-
-# This global variable is used to hold the list of modules to be disabled.
-disabled_module_list = []
-
-def add_dir_to_list(dirlist, dir):
-    """Add the directory 'dir' to the list 'dirlist' (at the front) if
-    1) 'dir' is not already in 'dirlist'
-    2) 'dir' actually exists, and is a directory."""
-    if dir is not None and os.path.isdir(dir) and dir not in dirlist:
-        dirlist.insert(0, dir)
-
-def macosx_sdk_root():
-    """
-    Return the directory of the current OSX SDK,
-    or '/' if no SDK was specified.
-    """
-    cflags = sysconfig.get_config_var('CFLAGS')
-    m = re.search(r'-isysroot\s+(\S+)', cflags)
-    if m is None:
-        sysroot = '/'
-    else:
-        sysroot = m.group(1)
-    return sysroot
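-# Sketch of the -isysroot extraction above (CFLAGS value hypothetical):
-##  re.search(r'-isysroot\s+(\S+)',
-##            '-O2 -isysroot /Developer/SDKs/MacOSX10.6.sdk').group(1)
-##  # -> '/Developer/SDKs/MacOSX10.6.sdk'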
-
-def is_macosx_sdk_path(path):
-    """
-    Returns True if 'path' can be located in an OSX SDK
-    """
-    return ( (path.startswith('/usr/') and not path.startswith('/usr/local'))
-                or path.startswith('/System/')
-                or path.startswith('/Library/') )
-
-def find_file(filename, std_dirs, paths):
-    """Searches for the directory where a given file is located,
-    and returns a possibly-empty list of additional directories, or None
-    if the file couldn't be found at all.
-
-    'filename' is the name of a file, such as readline.h or libcrypto.a.
-    'std_dirs' is the list of standard system directories; if the
-        file is found in one of them, no additional directives are needed.
-    'paths' is a list of additional locations to check; if the file is
-        found in one of them, the resulting list will contain the directory.
-    """
-    if host_platform == 'darwin':
-        # Honor the MacOSX SDK setting when one was specified.
-        # An SDK is a directory with the same structure as a real
-        # system, but with only header files and libraries.
-        sysroot = macosx_sdk_root()
-
-    # Check the standard locations
-    for dir in std_dirs:
-        f = os.path.join(dir, filename)
-
-        if host_platform == 'darwin' and is_macosx_sdk_path(dir):
-            f = os.path.join(sysroot, dir[1:], filename)
-
-        if os.path.exists(f): return []
-
-    # Check the additional directories
-    for dir in paths:
-        f = os.path.join(dir, filename)
-
-        if host_platform == 'darwin' and is_macosx_sdk_path(dir):
-            f = os.path.join(sysroot, dir[1:], filename)
-
-        if os.path.exists(f):
-            return [dir]
-
-    # Not found anywhere
-    return None
-
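
A minimal sketch of find_file()'s three-way contract, using a temporary directory instead of real system paths; find_file_simple is an illustrative name and omits the OS X SDK handling:

    import os, tempfile

    def find_file_simple(filename, std_dirs, paths):
        # Same return contract as find_file() above.
        for d in std_dirs:
            if os.path.exists(os.path.join(d, filename)):
                return []      # standard dir: no extra -I/-L needed
        for d in paths:
            if os.path.exists(os.path.join(d, filename)):
                return [d]     # extra dir: caller must add it
        return None            # not found anywhere

    tmp = tempfile.mkdtemp()
    open(os.path.join(tmp, 'zlib.h'), 'w').close()
    print(find_file_simple('zlib.h', ['/nonexistent'], [tmp]))   # [tmp]
    print(find_file_simple('zlib.h', [tmp], []))                 # []
    print(find_file_simple('missing.h', [tmp], []))              # None
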
-def find_library_file(compiler, libname, std_dirs, paths):
-    result = compiler.find_library_file(std_dirs + paths, libname)
-    if result is None:
-        return None
-
-    if host_platform == 'darwin':
-        sysroot = macosx_sdk_root()
-
-    # Check whether the found file is in one of the standard directories
-    dirname = os.path.dirname(result)
-    for p in std_dirs:
-        # Ensure path doesn't end with path separator
-        p = p.rstrip(os.sep)
-
-        if host_platform == 'darwin' and is_macosx_sdk_path(p):
-            if os.path.join(sysroot, p[1:]) == dirname:
-                return [ ]
-
-        if p == dirname:
-            return [ ]
-
-    # Otherwise, it must have been in one of the additional directories,
-    # so we have to figure out which one.
-    for p in paths:
-        # Ensure path doesn't end with path separator
-        p = p.rstrip(os.sep)
-
-        if host_platform == 'darwin' and is_macosx_sdk_path(p):
-            if os.path.join(sysroot, p[1:]) == dirname:
-                return [ p ]
-
-        if p == dirname:
-            return [p]
-    else:
-        assert False, "Internal error: Path not found in std_dirs or paths"
-
-def module_enabled(extlist, modname):
-    """Returns whether the module 'modname' is present in the list
-    of extensions 'extlist'."""
-    extlist = [ext for ext in extlist if ext.name == modname]
-    return len(extlist)
-
-def find_module_file(module, dirlist):
-    """Find a module in a set of possible folders. If it is not found
-    return the unadorned filename"""
-    list = find_file(module, [], dirlist)
-    if not list:
-        return module
-    if len(list) > 1:
-        log.info("WARNING: multiple copies of %s found"%module)
-    return os.path.join(list[0], module)
-
-class PyBuildExt(build_ext):
-
-    def __init__(self, dist):
-        build_ext.__init__(self, dist)
-        self.failed = []
-
-    def build_extensions(self):
-
-        # Detect which modules should be compiled
-        missing = self.detect_modules()
-
-        # Remove modules that are present on the disabled list
-        extensions = [ext for ext in self.extensions
-                      if ext.name not in disabled_module_list]
-        # move ctypes to the end, it depends on other modules
-        ext_map = dict((ext.name, i) for i, ext in enumerate(extensions))
-        if "_ctypes" in ext_map:
-            ctypes = extensions.pop(ext_map["_ctypes"])
-            extensions.append(ctypes)
-        self.extensions = extensions
-
-        # Fix up the autodetected modules, prefixing all the source files
-        # with Modules/ and adding Python's include directory to the path.
-        (srcdir,) = sysconfig.get_config_vars('srcdir')
-        if not srcdir:
-            # Maybe running on Windows but not using CYGWIN?
-            raise ValueError("No source directory; cannot proceed.")
-        srcdir = os.path.abspath(srcdir)
-        moddirlist = [os.path.join(srcdir, 'Modules')]
-
-        # Platform-dependent module source and include directories
-        incdirlist = []
-
-        if host_platform == 'darwin' and ("--disable-toolbox-glue" not in
-            sysconfig.get_config_var("CONFIG_ARGS")):
-            # Mac OS X also includes some mac-specific modules
-            macmoddir = os.path.join(srcdir, 'Mac/Modules')
-            moddirlist.append(macmoddir)
-            incdirlist.append(os.path.join(srcdir, 'Mac/Include'))
-
-        # Fix up the paths for scripts, too
-        self.distribution.scripts = [os.path.join(srcdir, filename)
-                                     for filename in self.distribution.scripts]
-
-        # Python header files
-        headers = [sysconfig.get_config_h_filename()]
-        headers += glob(os.path.join(sysconfig.get_path('include'), "*.h"))
-        for ext in self.extensions[:]:
-            ext.sources = [ find_module_file(filename, moddirlist)
-                            for filename in ext.sources ]
-            if ext.depends is not None:
-                ext.depends = [find_module_file(filename, moddirlist)
-                               for filename in ext.depends]
-            else:
-                ext.depends = []
-            # re-compile extensions if a header file has been changed
-            ext.depends.extend(headers)
-
-            # platform specific include directories
-            ext.include_dirs.extend(incdirlist)
-
-            # If a module has already been built statically,
-            # don't build it here
-            if ext.name in sys.builtin_module_names:
-                self.extensions.remove(ext)
-
-        # Parse Modules/Setup and Modules/Setup.local to figure out which
-        # modules are turned on in the file.
-        remove_modules = []
-        for filename in ('Modules/Setup', 'Modules/Setup.local'):
-            input = text_file.TextFile(filename, join_lines=1)
-            while 1:
-                line = input.readline()
-                if not line: break
-                line = line.split()
-                remove_modules.append(line[0])
-            input.close()
-
-        for ext in self.extensions[:]:
-            if ext.name in remove_modules:
-                self.extensions.remove(ext)
-
-        # When you run "make CC=altcc" or something similar, you really want
-        # those environment variables passed into the setup.py phase.  Here's
-        # a small set of useful ones.
-        compiler = os.environ.get('CC')
-        args = {}
-        # unfortunately, distutils doesn't let us provide separate C and C++
-        # compilers
-        if compiler is not None:
-            (ccshared,cflags) = sysconfig.get_config_vars('CCSHARED','CFLAGS')
-            args['compiler_so'] = compiler + ' ' + ccshared + ' ' + cflags
-        self.compiler.set_executables(**args)
-
-        build_ext.build_extensions(self)
-
-        longest = max([len(e.name) for e in self.extensions])
-        if self.failed:
-            longest = max(longest, max([len(name) for name in self.failed]))
-
-        def print_three_column(lst):
-            lst.sort(key=str.lower)
-            # guarantee zip() doesn't drop anything
-            while len(lst) % 3:
-                lst.append("")
-            for e, f, g in zip(lst[::3], lst[1::3], lst[2::3]):
-                print "%-*s   %-*s   %-*s" % (longest, e, longest, f,
-                                              longest, g)
-
-        if missing:
-            print
-            print ("Python build finished, but the necessary bits to build "
-                   "these modules were not found:")
-            print_three_column(missing)
-            print ("To find the necessary bits, look in setup.py in"
-                   " detect_modules() for the module's name.")
-            print
-
-        if self.failed:
-            failed = self.failed[:]
-            print
-            print "Failed to build these modules:"
-            print_three_column(failed)
-            print
-
-    def build_extension(self, ext):
-
-        if ext.name == '_ctypes':
-            if not self.configure_ctypes(ext):
-                return
-
-        try:
-            build_ext.build_extension(self, ext)
-        except (CCompilerError, DistutilsError), why:
-            self.announce('WARNING: building of extension "%s" failed: %s' %
-                          (ext.name, sys.exc_info()[1]))
-            self.failed.append(ext.name)
-            return
-        # Workaround for Mac OS X: The Carbon-based modules cannot be
-        # reliably imported into a command-line Python
-        if 'Carbon' in ext.extra_link_args:
-            self.announce(
-                'WARNING: skipping import check for Carbon-based "%s"' %
-                ext.name)
-            return
-
-        if host_platform == 'darwin' and (
-                sys.maxint > 2**32 and '-arch' in ext.extra_link_args):
-            # Don't bother doing an import check when an extension was
-            # built with an explicit '-arch' flag on OSX. That's currently
-            # only used to build 32-bit-only extensions in a 4-way
-            # universal build; loading 32-bit code into a 64-bit
-            # process will fail.
-            self.announce(
-                'WARNING: skipping import check for "%s"' %
-                ext.name)
-            return
-
-        # Workaround for Cygwin: Cygwin currently has fork issues when many
-        # modules have been imported
-        if host_platform == 'cygwin':
-            self.announce('WARNING: skipping import check for Cygwin-based "%s"'
-                % ext.name)
-            return
-        ext_filename = os.path.join(
-            self.build_lib,
-            self.get_ext_filename(self.get_ext_fullname(ext.name)))
-
-        # Don't try to load extensions for cross builds
-        if cross_compiling:
-            return
-
-        try:
-            imp.load_dynamic(ext.name, ext_filename)
-        except ImportError, why:
-            self.failed.append(ext.name)
-            self.announce('*** WARNING: renaming "%s" since importing it'
-                          ' failed: %s' % (ext.name, why), level=3)
-            assert not self.inplace
-            basename, tail = os.path.splitext(ext_filename)
-            newname = basename + "_failed" + tail
-            if os.path.exists(newname):
-                os.remove(newname)
-            os.rename(ext_filename, newname)
-
-            # XXX -- This relies on a Vile HACK in
-            # distutils.command.build_ext.build_extension().  The
-            # _built_objects attribute is stored there strictly for
-            # use here.
-            # If there is a failure, _built_objects may not be there,
-            # so catch the AttributeError and move on.
-            try:
-                for filename in self._built_objects:
-                    os.remove(filename)
-            except AttributeError:
-                self.announce('unable to remove files (ignored)')
-        except:
-            exc_type, why, tb = sys.exc_info()
-            self.announce('*** WARNING: importing extension "%s" '
-                          'failed with %s: %s' % (ext.name, exc_type, why),
-                          level=3)
-            self.failed.append(ext.name)
-
-    def add_multiarch_paths(self):
-        # Debian/Ubuntu multiarch support.
-        # https://wiki.ubuntu.com/MultiarchSpec
-        cc = sysconfig.get_config_var('CC')
-        tmpfile = os.path.join(self.build_temp, 'multiarch')
-        if not os.path.exists(self.build_temp):
-            os.makedirs(self.build_temp)
-        ret = os.system(
-            '%s -print-multiarch > %s 2> /dev/null' % (cc, tmpfile))
-        multiarch_path_component = ''
-        try:
-            if ret >> 8 == 0:
-                with open(tmpfile) as fp:
-                    multiarch_path_component = fp.readline().strip()
-        finally:
-            os.unlink(tmpfile)
-
-        if multiarch_path_component != '':
-            add_dir_to_list(self.compiler.library_dirs,
-                            '/usr/lib/' + multiarch_path_component)
-            add_dir_to_list(self.compiler.include_dirs,
-                            '/usr/include/' + multiarch_path_component)
-            return
-
-        if not find_executable('dpkg-architecture'):
-            return
-        opt = ''
-        if cross_compiling:
-            opt = '-t' + sysconfig.get_config_var('HOST_GNU_TYPE')
-        tmpfile = os.path.join(self.build_temp, 'multiarch')
-        if not os.path.exists(self.build_temp):
-            os.makedirs(self.build_temp)
-        ret = os.system(
-            'dpkg-architecture %s -qDEB_HOST_MULTIARCH > %s 2> /dev/null' %
-            (opt, tmpfile))
-        try:
-            if ret >> 8 == 0:
-                with open(tmpfile) as fp:
-                    multiarch_path_component = fp.readline().strip()
-                add_dir_to_list(self.compiler.library_dirs,
-                                '/usr/lib/' + multiarch_path_component)
-                add_dir_to_list(self.compiler.include_dirs,
-                                '/usr/include/' + multiarch_path_component)
-        finally:
-            os.unlink(tmpfile)
-
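
For reference, the same two multiarch probes as a standalone sketch; multiarch_triplet is an illustrative name, and subprocess stands in for the os.system-plus-tempfile approach used above:

    import subprocess

    def multiarch_triplet():
        # Try 'gcc -print-multiarch' first, then dpkg-architecture,
        # returning e.g. 'x86_64-linux-gnu', or None if neither works.
        for cmd in (['gcc', '-print-multiarch'],
                    ['dpkg-architecture', '-qDEB_HOST_MULTIARCH']):
            try:
                out = subprocess.check_output(cmd, stderr=subprocess.STDOUT)
            except (OSError, subprocess.CalledProcessError):
                continue
            triplet = out.decode('ascii', 'replace').strip()
            if triplet:
                return triplet
        return None

    triplet = multiarch_triplet()
    if triplet:
        print('/usr/lib/' + triplet)      # candidate library dir
        print('/usr/include/' + triplet)  # candidate include dir
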
-    def add_gcc_paths(self):
-        gcc = sysconfig.get_config_var('CC')
-        tmpfile = os.path.join(self.build_temp, 'gccpaths')
-        if not os.path.exists(self.build_temp):
-            os.makedirs(self.build_temp)
-        ret = os.system('%s -E -v - </dev/null 2>%s 1>/dev/null' % (gcc, tmpfile))
-        is_gcc = False
-        in_incdirs = False
-        inc_dirs = []
-        lib_dirs = []
-        try:
-            if ret >> 8 == 0:
-                with open(tmpfile) as fp:
-                    for line in fp.readlines():
-                        if line.startswith("gcc version"):
-                            is_gcc = True
-                        elif line.startswith("#include <...>"):
-                            in_incdirs = True
-                        elif line.startswith("End of search list"):
-                            in_incdirs = False
-                        elif is_gcc and line.startswith("LIBRARY_PATH"):
-                            for d in line.strip().split("=")[1].split(":"):
-                                d = os.path.normpath(d)
-                                if '/gcc/' not in d:
-                                    add_dir_to_list(self.compiler.library_dirs,
-                                                    d)
-                        elif is_gcc and in_incdirs and '/gcc/' not in line:
-                            add_dir_to_list(self.compiler.include_dirs,
-                                            line.strip())
-        finally:
-            os.unlink(tmpfile)
-
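
A standalone sketch of the 'gcc -E -v' diagnostic parse above; gcc_include_dirs is an illustrative name, it assumes a gcc-compatible compiler on PATH, and it collects only the include block (the LIBRARY_PATH handling is analogous):

    import subprocess

    def gcc_include_dirs(cc='gcc'):
        # Everything between '#include <...> search starts here:' and
        # 'End of search list' on stderr is a search directory.
        p = subprocess.Popen([cc, '-E', '-v', '-'],
                             stdin=subprocess.PIPE,
                             stdout=subprocess.PIPE,
                             stderr=subprocess.PIPE)
        _, err = p.communicate(b'')
        dirs, collecting = [], False
        for line in err.decode('utf-8', 'replace').splitlines():
            if line.startswith('#include <...>'):
                collecting = True
            elif line.startswith('End of search list'):
                collecting = False
            elif collecting:
                dirs.append(line.strip())
        return dirs

    print(gcc_include_dirs())
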
-    def detect_modules(self):
-        # PCMDI Change
-        # Ensure that the place we put tcl/tk/netcdf etc. is always used
-        libbase = os.environ.get('EXTERNALS', os.path.join(sys.prefix,'..','Externals'))
-        mylibdir = os.path.join(libbase,'lib')
-        myincdir = os.path.join(libbase,'include')
-        add_dir_to_list(self.compiler.library_dirs, mylibdir)
-        add_dir_to_list(self.compiler.include_dirs, myincdir)
-        # End PCMDI Changes
-        # Ensure that /usr/local is always used
-        if not cross_compiling:
-            add_dir_to_list(self.compiler.library_dirs, '/usr/local/lib')
-            add_dir_to_list(self.compiler.include_dirs, '/usr/local/include')
-        if cross_compiling:
-            self.add_gcc_paths()
-        self.add_multiarch_paths()
-
-        # Add paths specified in the environment variables LDFLAGS and
-        # CPPFLAGS for header and library files.
-        # We must get the values from the Makefile and not the environment
-        # directly, since an inconsistently reproducible issue comes up where
-        # the environment variable is not set even though the values were
-        # passed into configure and stored in the Makefile (issue found on
-        # OS X 10.3).
-        for env_var, arg_name, dir_list in (
-                ('LDFLAGS', '-R', self.compiler.runtime_library_dirs),
-                ('LDFLAGS', '-L', self.compiler.library_dirs),
-                ('CPPFLAGS', '-I', self.compiler.include_dirs)):
-            env_val = sysconfig.get_config_var(env_var)
-            if env_val:
-                # To prevent optparse from raising an exception about any
-                # options in env_val that it doesn't know about we strip out
-                # all double dashes and any dashes followed by a character
-                # that is not for the option we are dealing with.
-                #
-                # Please note that order of the regex is important!  We must
-                # strip out double-dashes first so that we don't end up with
-                # substituting "--Long" to "-Long" and thus lead to "ong" being
-                # used for a library directory.
-                env_val = re.sub(r'(^|\s+)-(-|(?!%s))' % arg_name[1],
-                                 ' ', env_val)
-                parser = optparse.OptionParser()
-                # Allow args to appear interspersed with options.
-                parser.allow_interspersed_args = True
-                parser.error = lambda msg: None
-                parser.add_option(arg_name, dest="dirs", action="append")
-                options = parser.parse_args(env_val.split())[0]
-                if options.dirs:
-                    for directory in reversed(options.dirs):
-                        add_dir_to_list(dir_list, directory)
-
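
The -I/-L extraction above, reduced to a standalone sketch; dirs_from_flags is an illustrative name and the flag strings are hypothetical:

    import optparse, re

    def dirs_from_flags(arg_name, env_val):
        # Strip '--long' options and any '-X' option other than the one we
        # want, so optparse never aborts on an unknown option, then collect
        # every occurrence of the wanted flag.
        env_val = re.sub(r'(^|\s+)-(-|(?!%s))' % arg_name[1], ' ', env_val)
        parser = optparse.OptionParser()
        parser.allow_interspersed_args = True
        parser.error = lambda msg: None
        parser.add_option(arg_name, dest='dirs', action='append')
        opts, _ = parser.parse_args(env_val.split())
        return opts.dirs or []

    # Hypothetical flag strings, as they might come out of the Makefile:
    print(dirs_from_flags('-I', '-I/opt/ssl/include -DNDEBUG -I/usr/local/include'))
    print(dirs_from_flags('-L', '--sysroot=/alt -L/opt/ssl/lib'))
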
-        if os.path.normpath(sys.prefix) != '/usr' \
-                and not sysconfig.get_config_var('PYTHONFRAMEWORK'):
-            # OSX note: Don't add LIBDIR and INCLUDEDIR when building a
-            # framework (PYTHONFRAMEWORK is set), to avoid linking problems
-            # when building a framework with different architectures than
-            # the one that is currently installed (issue #7473)
-            add_dir_to_list(self.compiler.library_dirs,
-                            sysconfig.get_config_var("LIBDIR"))
-            add_dir_to_list(self.compiler.include_dirs,
-                            sysconfig.get_config_var("INCLUDEDIR"))
-
-        try:
-            have_unicode = unicode
-        except NameError:
-            have_unicode = 0
-
-        # lib_dirs and inc_dirs are used to search for files;
-        # if a file is found in one of those directories, it can
-        # be assumed that no additional -I,-L directives are needed.
-        inc_dirs = self.compiler.include_dirs[:]
-        lib_dirs = self.compiler.library_dirs[:]
-        if not cross_compiling:
-            for d in (
-                '/usr/include',
-                ):
-                add_dir_to_list(inc_dirs, d)
-            for d in (
-                '/lib64', '/usr/lib64',
-                '/lib', '/usr/lib',
-                ):
-                add_dir_to_list(lib_dirs, d)
-        exts = []
-        missing = []
-
-        config_h = sysconfig.get_config_h_filename()
-        config_h_vars = sysconfig.parse_config_h(open(config_h))
-
-        srcdir = sysconfig.get_config_var('srcdir')
-
-        # Check for AtheOS which has libraries in non-standard locations
-        if host_platform == 'atheos':
-            lib_dirs += ['/system/libs', '/atheos/autolnk/lib']
-            lib_dirs += os.getenv('LIBRARY_PATH', '').split(os.pathsep)
-            inc_dirs += ['/system/include', '/atheos/autolnk/include']
-            inc_dirs += os.getenv('C_INCLUDE_PATH', '').split(os.pathsep)
-
-        # OSF/1 and Unixware have some stuff in /usr/ccs/lib (like -ldb)
-        if host_platform in ['osf1', 'unixware7', 'openunix8']:
-            lib_dirs += ['/usr/ccs/lib']
-
-        # HP-UX11iv3 keeps files in lib/hpux folders.
-        if host_platform == 'hp-ux11':
-            lib_dirs += ['/usr/lib/hpux64', '/usr/lib/hpux32']
-
-        if host_platform == 'darwin':
-            # This should work on any unixy platform ;-)
-            # If the user has bothered specifying additional -I and -L flags
-            # in OPT and LDFLAGS we might as well use them here.
-            #   NOTE: using shlex.split would technically be more correct, but
-            # also gives a bootstrap problem. Let's hope nobody uses directories
-            # with whitespace in the name to store libraries.
-            cflags, ldflags = sysconfig.get_config_vars(
-                    'CFLAGS', 'LDFLAGS')
-            for item in cflags.split():
-                if item.startswith('-I'):
-                    inc_dirs.append(item[2:])
-
-            for item in ldflags.split():
-                if item.startswith('-L'):
-                    lib_dirs.append(item[2:])
-
-        # Check for MacOS X, which doesn't need libm.a at all
-        math_libs = ['m']
-        if host_platform in ['darwin', 'beos']:
-            math_libs = []
-
-        # XXX Omitted modules: gl, pure, dl, SGI-specific modules
-
-        #
-        # The following modules are all pretty straightforward, and compile
-        # on pretty much any POSIXish platform.
-        #
-
-        # Some modules that are normally always on:
-        #exts.append( Extension('_weakref', ['_weakref.c']) )
-
-        # array objects
-        exts.append( Extension('array', ['arraymodule.c']) )
-        # complex math library functions
-        exts.append( Extension('cmath', ['cmathmodule.c', '_math.c'],
-                               depends=['_math.h'],
-                               libraries=math_libs) )
-        # math library functions, e.g. sin()
-        exts.append( Extension('math',  ['mathmodule.c', '_math.c'],
-                               depends=['_math.h'],
-                               libraries=math_libs) )
-        # fast string operations implemented in C
-        exts.append( Extension('strop', ['stropmodule.c']) )
-        # time operations and variables
-        exts.append( Extension('time', ['timemodule.c'],
-                               libraries=math_libs) )
-        exts.append( Extension('datetime', ['datetimemodule.c', 'timemodule.c'],
-                               libraries=math_libs) )
-        # fast iterator tools implemented in C
-        exts.append( Extension("itertools", ["itertoolsmodule.c"]) )
-        # code that will be builtins in the future, but conflict with the
-        #  current builtins
-        exts.append( Extension('future_builtins', ['future_builtins.c']) )
-        # random number generator implemented in C
-        exts.append( Extension("_random", ["_randommodule.c"]) )
-        # high-performance collections
-        exts.append( Extension("_collections", ["_collectionsmodule.c"]) )
-        # bisect
-        exts.append( Extension("_bisect", ["_bisectmodule.c"]) )
-        # heapq
-        exts.append( Extension("_heapq", ["_heapqmodule.c"]) )
-        # operator.add() and similar goodies
-        exts.append( Extension('operator', ['operator.c']) )
-        # Python 3.1 _io library
-        exts.append( Extension("_io",
-            ["_io/bufferedio.c", "_io/bytesio.c", "_io/fileio.c",
-             "_io/iobase.c", "_io/_iomodule.c", "_io/stringio.c", "_io/textio.c"],
-             depends=["_io/_iomodule.h"], include_dirs=["Modules/_io"]))
-        # _functools
-        exts.append( Extension("_functools", ["_functoolsmodule.c"]) )
-        # _json speedups
-        exts.append( Extension("_json", ["_json.c"]) )
-        # Python C API test module
-        exts.append( Extension('_testcapi', ['_testcapimodule.c'],
-                               depends=['testcapi_long.h']) )
-        # profilers (_lsprof is for cProfile.py)
-        exts.append( Extension('_hotshot', ['_hotshot.c']) )
-        exts.append( Extension('_lsprof', ['_lsprof.c', 'rotatingtree.c']) )
-        # static Unicode character database
-        if have_unicode:
-            exts.append( Extension('unicodedata', ['unicodedata.c']) )
-        else:
-            missing.append('unicodedata')
-        # access to ISO C locale support
-        data = open('pyconfig.h').read()
-        m = re.search(r"#s*define\s+WITH_LIBINTL\s+1\s*", data)
-        if m is not None:
-            locale_libs = ['intl']
-        else:
-            locale_libs = []
-        if host_platform == 'darwin':
-            locale_extra_link_args = ['-framework', 'CoreFoundation']
-        else:
-            locale_extra_link_args = []
-
-
-        exts.append( Extension('_locale', ['_localemodule.c'],
-                               libraries=locale_libs,
-                               extra_link_args=locale_extra_link_args) )
-
-        # Modules with some UNIX dependencies -- on by default:
-        # (If you have a really backward UNIX, select and socket may not be
-        # supported...)
-
-        # fcntl(2) and ioctl(2)
-        libs = []
-        if (config_h_vars.get('FLOCK_NEEDS_LIBBSD', False)):
-            # May be necessary on AIX for flock function
-            libs = ['bsd']
-        exts.append( Extension('fcntl', ['fcntlmodule.c'], libraries=libs) )
-        # pwd(3)
-        exts.append( Extension('pwd', ['pwdmodule.c']) )
-        # grp(3)
-        exts.append( Extension('grp', ['grpmodule.c']) )
-        # spwd, shadow passwords
-        if (config_h_vars.get('HAVE_GETSPNAM', False) or
-                config_h_vars.get('HAVE_GETSPENT', False)):
-            exts.append( Extension('spwd', ['spwdmodule.c']) )
-        else:
-            missing.append('spwd')
-
-        # select(2); not on ancient System V
-        exts.append( Extension('select', ['selectmodule.c']) )
-
-        # Fred Drake's interface to the Python parser
-        exts.append( Extension('parser', ['parsermodule.c']) )
-
-        # cStringIO and cPickle
-        exts.append( Extension('cStringIO', ['cStringIO.c']) )
-        exts.append( Extension('cPickle', ['cPickle.c']) )
-
-        # Memory-mapped files (also works on Win32).
-        if host_platform not in ['atheos']:
-            exts.append( Extension('mmap', ['mmapmodule.c']) )
-        else:
-            missing.append('mmap')
-
-        # Lance Ellinghaus's syslog module
-        # syslog daemon interface
-        exts.append( Extension('syslog', ['syslogmodule.c']) )
-
-        # George Neville-Neil's timing module:
-        # Deprecated in PEP 4 http://www.python.org/peps/pep-0004.html
-        # http://mail.python.org/pipermail/python-dev/2006-January/060023.html
-        #exts.append( Extension('timing', ['timingmodule.c']) )
-
-        #
-        # Here ends the simple stuff.  From here on, modules need certain
-        # libraries, are platform-specific, or present other surprises.
-        #
-
-        # Multimedia modules
-        # These don't work for 64-bit platforms!!!
-        # These represent audio samples or images as strings:
-
-        # Operations on audio samples
-        # According to #993173, this one should actually work fine on
-        # 64-bit platforms.
-        exts.append( Extension('audioop', ['audioop.c']) )
-
-        # Disabled on 64-bit platforms
-        if sys.maxint != 9223372036854775807L:
-            # Operations on images
-            exts.append( Extension('imageop', ['imageop.c']) )
-        else:
-            missing.extend(['imageop'])
-
-        # readline
-        do_readline = self.compiler.find_library_file(lib_dirs, 'readline')
-        readline_termcap_library = ""
-        curses_library = ""
-        # Determine if readline is already linked against curses or tinfo.
-        if do_readline and find_executable('ldd'):
-            fp = os.popen("ldd %s" % do_readline)
-            ldd_output = fp.readlines()
-            ret = fp.close()
-            if ret is None or ret >> 8 == 0:
-                for ln in ldd_output:
-                    if 'curses' in ln:
-                        readline_termcap_library = re.sub(
-                            r'.*lib(n?cursesw?)\.so.*', r'\1', ln
-                        ).rstrip()
-                        break
-                    if 'tinfo' in ln: # termcap interface split out from ncurses
-                        readline_termcap_library = 'tinfo'
-                        break
-        # Issue 7384: If readline is already linked against curses,
-        # use the same library for the readline and curses modules.
-        if 'curses' in readline_termcap_library:
-            curses_library = readline_termcap_library
-        elif self.compiler.find_library_file(lib_dirs, 'ncursesw'):
-            curses_library = 'ncursesw'
-        elif self.compiler.find_library_file(lib_dirs, 'ncurses'):
-            curses_library = 'ncurses'
-        elif self.compiler.find_library_file(lib_dirs, 'curses'):
-            curses_library = 'curses'
-
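
A sketch of the ldd-based decision above, run over canned, hypothetical ldd output (exercising it for real needs an actual libreadline):

    import re

    def termcap_library_from_ldd(ldd_output):
        # If readline already links a curses flavour, reuse that exact
        # library; a split-out tinfo wins too, as in the loop above.
        for ln in ldd_output:
            if 'curses' in ln:
                return re.sub(r'.*lib(n?cursesw?)\.so.*', r'\1', ln).rstrip()
            if 'tinfo' in ln:
                return 'tinfo'
        return ''

    sample = [
        '\tlibncursesw.so.5 => /lib64/libncursesw.so.5 (0x00007f...)',
        '\tlibc.so.6 => /lib64/libc.so.6 (0x00007f...)',
    ]
    print(termcap_library_from_ldd(sample))   # 'ncursesw'
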
-        if host_platform == 'darwin':
-            os_release = int(os.uname()[2].split('.')[0])
-            dep_target = sysconfig.get_config_var('MACOSX_DEPLOYMENT_TARGET')
-            if dep_target and dep_target.split('.') < ['10', '5']:
-                os_release = 8
-            if os_release < 9:
-                # MacOSX 10.4 has a broken readline. Don't try to build
-                # the readline module unless the user has installed a fixed
-                # readline package
-                if find_file('readline/rlconf.h', inc_dirs, []) is None:
-                    do_readline = False
-        if do_readline:
-            if host_platform == 'darwin' and os_release < 9:
-                # In every directory on the search path search for a dynamic
-                # library and then a static library, instead of first looking
-                # for dynamic libraries on the entire path.
-                # This way a statically linked custom readline gets picked up
-                # before the (possibly broken) dynamic library in /usr/lib.
-                readline_extra_link_args = ('-Wl,-search_paths_first',)
-            else:
-                readline_extra_link_args = ()
-
-            readline_libs = ['readline']
-            if readline_termcap_library:
-                pass # Issue 7384: Already linked against curses or tinfo.
-            elif curses_library:
-                readline_libs.append(curses_library)
-            elif self.compiler.find_library_file(lib_dirs +
-                                                     ['/usr/lib/termcap'],
-                                                     'termcap'):
-                readline_libs.append('termcap')
-            exts.append( Extension('readline', ['readline.c'],
-                                   library_dirs=['/usr/lib/termcap'],
-                                   extra_link_args=readline_extra_link_args,
-                                   libraries=readline_libs) )
-        else:
-            missing.append('readline')
-
-        # crypt module.
-
-        if self.compiler.find_library_file(lib_dirs, 'crypt'):
-            libs = ['crypt']
-        else:
-            libs = []
-        exts.append( Extension('crypt', ['cryptmodule.c'], libraries=libs) )
-
-        # CSV files
-        exts.append( Extension('_csv', ['_csv.c']) )
-
-        # socket(2)
-        exts.append( Extension('_socket', ['socketmodule.c', 'timemodule.c'],
-                               depends=['socketmodule.h'],
-                               libraries=math_libs) )
-        # Detect SSL support for the socket module (via _ssl)
-        search_for_ssl_incs_in = [
-                              '/usr/local/ssl/include',
-                              '/usr/contrib/ssl/include/'
-                             ]
-        ssl_incs = find_file('openssl/ssl.h', inc_dirs,
-                             search_for_ssl_incs_in
-                             )
-        if ssl_incs is not None:
-            krb5_h = find_file('krb5.h', inc_dirs,
-                               ['/usr/kerberos/include'])
-            if krb5_h:
-                ssl_incs += krb5_h
-        ssl_libs = find_library_file(self.compiler, 'ssl',lib_dirs,
-                                     ['/usr/local/ssl/lib',
-                                      '/usr/contrib/ssl/lib/'
-                                     ] )
-
-        if (ssl_incs is not None and
-            ssl_libs is not None):
-            exts.append( Extension('_ssl', ['_ssl.c'],
-                                   include_dirs = ssl_incs,
-                                   library_dirs = ssl_libs,
-                                   libraries = ['ssl', 'crypto'],
-                                   depends = ['socketmodule.h']), )
-        else:
-            missing.append('_ssl')
-
-        # find out which version of OpenSSL we have
-        openssl_ver = 0
-        openssl_ver_re = re.compile(
-            '^\s*#\s*define\s+OPENSSL_VERSION_NUMBER\s+(0x[0-9a-fA-F]+)' )
-
-        # look for the openssl version header on the compiler search path.
-        opensslv_h = find_file('openssl/opensslv.h', [],
-                inc_dirs + search_for_ssl_incs_in)
-        if opensslv_h:
-            name = os.path.join(opensslv_h[0], 'openssl/opensslv.h')
-            if host_platform == 'darwin' and is_macosx_sdk_path(name):
-                name = os.path.join(macosx_sdk_root(), name[1:])
-            try:
-                incfile = open(name, 'r')
-                for line in incfile:
-                    m = openssl_ver_re.match(line)
-                    if m:
-                        openssl_ver = int(m.group(1), 16)  # hex literal; no need for eval()
-            except IOError, msg:
-                print "IOError while reading opensshv.h:", msg
-                pass
-
-        min_openssl_ver = 0x00907000
-        have_any_openssl = ssl_incs is not None and ssl_libs is not None
-        have_usable_openssl = (have_any_openssl and
-                               openssl_ver >= min_openssl_ver)
-
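
The version scan above as a standalone sketch over a canned header excerpt; openssl_version is an illustrative name:

    import re

    OPENSSL_VER_RE = re.compile(
        r'^\s*#\s*define\s+OPENSSL_VERSION_NUMBER\s+(0x[0-9a-fA-F]+)')

    def openssl_version(header_lines):
        # Return the version number as an int, 0 if it is never found.
        for line in header_lines:
            m = OPENSSL_VER_RE.match(line)
            if m:
                return int(m.group(1), 16)
        return 0

    # Canned line in the shape of openssl/opensslv.h:
    ver = openssl_version(['# define OPENSSL_VERSION_NUMBER  0x1000105fL'])
    print(hex(ver))            # 0x1000105f
    print(ver >= 0x00907000)   # True: new enough for _hashlib
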
-        if have_any_openssl:
-            if have_usable_openssl:
-                # The _hashlib module wraps optimized implementations
-                # of hash functions from the OpenSSL library.
-                exts.append( Extension('_hashlib', ['_hashopenssl.c'],
-                                       include_dirs = ssl_incs,
-                                       library_dirs = ssl_libs,
-                                       libraries = ['ssl', 'crypto']) )
-            else:
-                print ("warning: openssl 0x%08x is too old for _hashlib" %
-                       openssl_ver)
-                missing.append('_hashlib')
-        if COMPILED_WITH_PYDEBUG or not have_usable_openssl:
-            # The _sha module implements the SHA1 hash algorithm.
-            exts.append( Extension('_sha', ['shamodule.c']) )
-            # The _md5 module implements the RSA Data Security, Inc. MD5
-            # Message-Digest Algorithm, described in RFC 1321.  The
-            # necessary files md5.c and md5.h are included here.
-            exts.append( Extension('_md5',
-                            sources = ['md5module.c', 'md5.c'],
-                            depends = ['md5.h']) )
-
-        min_sha2_openssl_ver = 0x00908000
-        if COMPILED_WITH_PYDEBUG or openssl_ver < min_sha2_openssl_ver:
-            # OpenSSL doesn't do these until 0.9.8 so we'll bring our own hash
-            exts.append( Extension('_sha256', ['sha256module.c']) )
-            exts.append( Extension('_sha512', ['sha512module.c']) )
-
-        # Modules that provide persistent dictionary-like semantics.  You will
-        # probably want to arrange for at least one of them to be available on
-        # your machine, though none are defined by default because of library
-        # dependencies.  The Python module anydbm.py provides an
-        # implementation independent wrapper for these; dumbdbm.py provides
-        # similar functionality (but slower of course) implemented in Python.
-
-        # Sleepycat^WOracle Berkeley DB interface.
-        #  http://www.oracle.com/database/berkeley-db/db/index.html
-        #
-        # This requires the Sleepycat^WOracle DB code. The supported versions
-        # are set below.  Visit the URL above to download
-        # a release.  Most open source OSes come with one or more
-        # versions of BerkeleyDB already installed.
-
-        max_db_ver = (5, 3)
-        min_db_ver = (4, 3)
-        db_setup_debug = False   # verbose debug prints from this script?
-
-        def allow_db_ver(db_ver):
-            """Returns a boolean if the given BerkeleyDB version is acceptable.
-
-            Args:
-              db_ver: A tuple of the version to verify.
-            """
-            if not (min_db_ver <= db_ver <= max_db_ver):
-                return False
-            # Use this function to filter out known bad configurations.
-            if (4, 6) == db_ver[:2]:
-                # BerkeleyDB 4.6.x is not stable on many architectures.
-                arch = platform_machine()
-                if arch not in ('i386', 'i486', 'i586', 'i686',
-                                'x86_64', 'ia64'):
-                    return False
-            return True
-
-        def gen_db_minor_ver_nums(major):
-            if major == 5:
-                for x in range(max_db_ver[1]+1):
-                    if allow_db_ver((5, x)):
-                        yield x
-            elif major == 4:
-                for x in range(9):
-                    if allow_db_ver((4, x)):
-                        yield x
-            elif major == 3:
-                for x in (3,):
-                    if allow_db_ver((3, x)):
-                        yield x
-            else:
-                raise ValueError("unknown major BerkeleyDB version", major)
-
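
For reference, a flat enumeration of the versions the two helpers above would accept (ignoring the per-architecture 4.6 carve-out); acceptable_db_versions is an illustrative name:

    def acceptable_db_versions(min_ver=(4, 3), max_ver=(5, 3)):
        # The same candidates gen_db_minor_ver_nums() walks, filtered by
        # the min/max bounds that allow_db_ver() enforces.
        candidates = [(3, 3)]
        candidates += [(4, x) for x in range(9)]
        candidates += [(5, x) for x in range(max_ver[1] + 1)]
        return [v for v in candidates if min_ver <= v <= max_ver]

    print(acceptable_db_versions())
    # [(4, 3), (4, 4), (4, 5), (4, 6), (4, 7), (4, 8),
    #  (5, 0), (5, 1), (5, 2), (5, 3)]
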
-        # construct a list of paths to look for the header file in on
-        # top of the normal inc_dirs.
-        db_inc_paths = [
-            '/usr/include/db4',
-            '/usr/local/include/db4',
-            '/opt/sfw/include/db4',
-            '/usr/include/db3',
-            '/usr/local/include/db3',
-            '/opt/sfw/include/db3',
-            # Fink defaults (http://fink.sourceforge.net/)
-            '/sw/include/db4',
-            '/sw/include/db3',
-        ]
-        # 4.x minor number specific paths
-        for x in gen_db_minor_ver_nums(4):
-            db_inc_paths.append('/usr/include/db4%d' % x)
-            db_inc_paths.append('/usr/include/db4.%d' % x)
-            db_inc_paths.append('/usr/local/BerkeleyDB.4.%d/include' % x)
-            db_inc_paths.append('/usr/local/include/db4%d' % x)
-            db_inc_paths.append('/pkg/db-4.%d/include' % x)
-            db_inc_paths.append('/opt/db-4.%d/include' % x)
-            # MacPorts default (http://www.macports.org/)
-            db_inc_paths.append('/opt/local/include/db4%d' % x)
-        # 3.x minor number specific paths
-        for x in gen_db_minor_ver_nums(3):
-            db_inc_paths.append('/usr/include/db3%d' % x)
-            db_inc_paths.append('/usr/local/BerkeleyDB.3.%d/include' % x)
-            db_inc_paths.append('/usr/local/include/db3%d' % x)
-            db_inc_paths.append('/pkg/db-3.%d/include' % x)
-            db_inc_paths.append('/opt/db-3.%d/include' % x)
-
-        if cross_compiling:
-            db_inc_paths = []
-
-        # Add some common subdirectories for Sleepycat DB to the list,
-        # based on the standard include directories. This way DB3/4 gets
-        # picked up when it is installed in a non-standard prefix and
-        # the user has added that prefix into inc_dirs.
-        std_variants = []
-        for dn in inc_dirs:
-            std_variants.append(os.path.join(dn, 'db3'))
-            std_variants.append(os.path.join(dn, 'db4'))
-            for x in gen_db_minor_ver_nums(4):
-                std_variants.append(os.path.join(dn, "db4%d"%x))
-                std_variants.append(os.path.join(dn, "db4.%d"%x))
-            for x in gen_db_minor_ver_nums(3):
-                std_variants.append(os.path.join(dn, "db3%d"%x))
-                std_variants.append(os.path.join(dn, "db3.%d"%x))
-
-        db_inc_paths = std_variants + db_inc_paths
-        db_inc_paths = [p for p in db_inc_paths if os.path.exists(p)]
-
-        db_ver_inc_map = {}
-
-        if host_platform == 'darwin':
-            sysroot = macosx_sdk_root()
-
-        class db_found(Exception): pass
-        try:
-            # See whether there is a Sleepycat header in the standard
-            # search path.
-            for d in inc_dirs + db_inc_paths:
-                f = os.path.join(d, "db.h")
-
-                if host_platform == 'darwin' and is_macosx_sdk_path(d):
-                    f = os.path.join(sysroot, d[1:], "db.h")
-
-                if db_setup_debug: print "db: looking for db.h in", f
-                if os.path.exists(f):
-                    f = open(f).read()
-                    m = re.search(r"#define\WDB_VERSION_MAJOR\W(\d+)", f)
-                    if m:
-                        db_major = int(m.group(1))
-                        m = re.search(r"#define\WDB_VERSION_MINOR\W(\d+)", f)
-                        db_minor = int(m.group(1))
-                        db_ver = (db_major, db_minor)
-
-                        # Avoid 4.6 prior to 4.6.21 due to a BerkeleyDB bug
-                        if db_ver == (4, 6):
-                            m = re.search(r"#define\WDB_VERSION_PATCH\W(\d+)", f)
-                            db_patch = int(m.group(1))
-                            if db_patch < 21:
-                                print "db.h:", db_ver, "patch", db_patch,
-                                print "being ignored (4.6.x must be >= 4.6.21)"
-                                continue
-
-                        if ( (db_ver not in db_ver_inc_map) and
-                            allow_db_ver(db_ver) ):
-                            # save the include directory with the db.h version
-                            # (first occurrence only)
-                            db_ver_inc_map[db_ver] = d
-                            if db_setup_debug:
-                                print "db.h: found", db_ver, "in", d
-                        else:
-                            # we already found a header for this library version
-                            if db_setup_debug: print "db.h: ignoring", d
-                    else:
-                        # ignore this header, it didn't contain a version number
-                        if db_setup_debug:
-                            print "db.h: no version number version in", d
-
-            db_found_vers = db_ver_inc_map.keys()
-            db_found_vers.sort()
-
-            while db_found_vers:
-                db_ver = db_found_vers.pop()
-                db_incdir = db_ver_inc_map[db_ver]
-
-                # check lib directories parallel to the location of the header
-                db_dirs_to_check = [
-                    db_incdir.replace("include", 'lib64'),
-                    db_incdir.replace("include", 'lib'),
-                ]
-
-                if host_platform != 'darwin':
-                    db_dirs_to_check = filter(os.path.isdir, db_dirs_to_check)
-
-                else:
-                    # Same as other branch, but takes OSX SDK into account
-                    tmp = []
-                    for dn in db_dirs_to_check:
-                        if is_macosx_sdk_path(dn):
-                            if os.path.isdir(os.path.join(sysroot, dn[1:])):
-                                tmp.append(dn)
-                        else:
-                            if os.path.isdir(dn):
-                                tmp.append(dn)
-                    db_dirs_to_check = tmp
-
-                # Look for a version specific db-X.Y before an ambiguous dbX
-                # XXX should we -ever- look for a dbX name?  Do any
-                # systems really not name their library by version and
-                # symlink to more general names?
-                for dblib in (('db-%d.%d' % db_ver),
-                              ('db%d%d' % db_ver),
-                              ('db%d' % db_ver[0])):
-                    dblib_file = self.compiler.find_library_file(
-                                    db_dirs_to_check + lib_dirs, dblib )
-                    if dblib_file:
-                        dblib_dir = [ os.path.abspath(os.path.dirname(dblib_file)) ]
-                        raise db_found
-                    else:
-                        if db_setup_debug: print "db lib: ", dblib, "not found"
-
-        except db_found:
-            if db_setup_debug:
-                print "bsddb using BerkeleyDB lib:", db_ver, dblib
-                print "bsddb lib dir:", dblib_dir, " inc dir:", db_incdir
-            db_incs = [db_incdir]
-            dblibs = [dblib]
-            # We add the runtime_library_dirs argument because the
-            # BerkeleyDB lib we're linking against often isn't in the
-            # system dynamic library search path.  This is usually
-            # correct and most trouble free, but may cause problems in
-            # some unusual system configurations (e.g. the directory
-            # is on an NFS server that goes away).
-            exts.append(Extension('_bsddb', ['_bsddb.c'],
-                                  depends = ['bsddb.h'],
-                                  library_dirs=dblib_dir,
-                                  runtime_library_dirs=dblib_dir,
-                                  include_dirs=db_incs,
-                                  libraries=dblibs))
-        else:
-            if db_setup_debug: print "db: no appropriate library found"
-            db_incs = None
-            dblibs = []
-            dblib_dir = None
-            missing.append('_bsddb')
-
-        # The sqlite interface
-        sqlite_setup_debug = False   # verbose debug prints from this script?
-
-        # We hunt for #define SQLITE_VERSION "n.n.n"
-        # We need to find >= sqlite version 3.0.8
-        sqlite_incdir = sqlite_libdir = None
-        sqlite_inc_paths = [ '/usr/include',
-                             '/usr/include/sqlite',
-                             '/usr/include/sqlite3',
-                             '/usr/local/include',
-                             '/usr/local/include/sqlite',
-                             '/usr/local/include/sqlite3',
-                           ]
-        if cross_compiling:
-            sqlite_inc_paths = []
-        MIN_SQLITE_VERSION_NUMBER = (3, 0, 8)
-        MIN_SQLITE_VERSION = ".".join([str(x)
-                                    for x in MIN_SQLITE_VERSION_NUMBER])
-
-        # Scan the default include directories before the SQLite specific
-        # ones. This allows one to override the copy of sqlite on OSX,
-        # where /usr/include contains an old version of sqlite.
-        if host_platform == 'darwin':
-            sysroot = macosx_sdk_root()
-
-        for d_ in inc_dirs + sqlite_inc_paths:
-            d = d_
-            if host_platform == 'darwin' and is_macosx_sdk_path(d):
-                d = os.path.join(sysroot, d[1:])
-
-            f = os.path.join(d, "sqlite3.h")
-            if os.path.exists(f):
-                if sqlite_setup_debug: print "sqlite: found %s"%f
-                incf = open(f).read()
-                m = re.search(
-                    r'\s*.*#\s*.*define\s.*SQLITE_VERSION\W*"([\d\.]*)"', incf)
-                if m:
-                    sqlite_version = m.group(1)
-                    sqlite_version_tuple = tuple([int(x)
-                                        for x in sqlite_version.split(".")])
-                    if sqlite_version_tuple >= MIN_SQLITE_VERSION_NUMBER:
-                        # we win!
-                        if sqlite_setup_debug:
-                            print "%s/sqlite3.h: version %s"%(d, sqlite_version)
-                        sqlite_incdir = d
-                        break
-                    else:
-                        if sqlite_setup_debug:
-                            print "%s: version %d is too old, need >= %s"%(d,
-                                        sqlite_version, MIN_SQLITE_VERSION)
-                elif sqlite_setup_debug:
-                    print "sqlite: %s had no SQLITE_VERSION"%(f,)
-
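
The SQLITE_VERSION hunt above as a standalone sketch over a canned header line; sqlite_version_from_header is an illustrative name:

    import re

    def sqlite_version_from_header(text):
        # Pull n.n.n out of '#define SQLITE_VERSION "n.n.n"' and return it
        # as a tuple of ints so versions compare correctly.
        m = re.search(r'#\s*define\s+SQLITE_VERSION\W*"([\d\.]*)"', text)
        if not m:
            return None
        return tuple(int(x) for x in m.group(1).split('.'))

    ver = sqlite_version_from_header('#define SQLITE_VERSION      "3.8.2"')
    print(ver)                # (3, 8, 2)
    print(ver >= (3, 0, 8))   # True: new enough for _sqlite3
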
-        if sqlite_incdir:
-            sqlite_dirs_to_check = [
-                os.path.join(sqlite_incdir, '..', 'lib64'),
-                os.path.join(sqlite_incdir, '..', 'lib'),
-                os.path.join(sqlite_incdir, '..', '..', 'lib64'),
-                os.path.join(sqlite_incdir, '..', '..', 'lib'),
-            ]
-            sqlite_libfile = self.compiler.find_library_file(
-                                sqlite_dirs_to_check + lib_dirs, 'sqlite3')
-            if sqlite_libfile:
-                sqlite_libdir = [os.path.abspath(os.path.dirname(sqlite_libfile))]
-
-        if sqlite_incdir and sqlite_libdir:
-            sqlite_srcs = ['_sqlite/cache.c',
-                '_sqlite/connection.c',
-                '_sqlite/cursor.c',
-                '_sqlite/microprotocols.c',
-                '_sqlite/module.c',
-                '_sqlite/prepare_protocol.c',
-                '_sqlite/row.c',
-                '_sqlite/statement.c',
-                '_sqlite/util.c', ]
-
-            sqlite_defines = []
-            if host_platform != "win32":
-                sqlite_defines.append(('MODULE_NAME', '"sqlite3"'))
-            else:
-                sqlite_defines.append(('MODULE_NAME', '\\"sqlite3\\"'))
-
-            # Comment this out if you want the sqlite3 module to be able to load extensions.
-            sqlite_defines.append(("SQLITE_OMIT_LOAD_EXTENSION", "1"))
-
-            if host_platform == 'darwin':
-                # In every directory on the search path search for a dynamic
-                # library and then a static library, instead of first looking
-                # for dynamic libraries on the entire path.
-                # This way a statically linked custom sqlite gets picked up
-                # before the dynamic library in /usr/lib.
-                sqlite_extra_link_args = ('-Wl,-search_paths_first',)
-            else:
-                sqlite_extra_link_args = ()
-
-            exts.append(Extension('_sqlite3', sqlite_srcs,
-                                  define_macros=sqlite_defines,
-                                  include_dirs=["Modules/_sqlite",
-                                                sqlite_incdir],
-                                  library_dirs=sqlite_libdir,
-                                  extra_link_args=sqlite_extra_link_args,
-                                  libraries=["sqlite3",]))
-        else:
-            missing.append('_sqlite3')
-
-        # Look for Berkeley db 1.85.   Note that it is built as a different
-        # module name so it can be included even when later versions are
-        # available.  A very restrictive search is performed to avoid
-        # accidentally building this module with a later version of the
-        # underlying db library.  Many BSD-ish Unixes incorporate db 1.85
-        # symbols into libc and place the include file in /usr/include.
-        #
-        # If the better bsddb library can be built (db_incs is defined)
-        # we do not build this one.  Otherwise this build will pick up
-        # the more recent berkeleydb's db.h file first in the include path
-        # when attempting to compile and it will fail.
-        f = "/usr/include/db.h"
-
-        if host_platform == 'darwin':
-            if is_macosx_sdk_path(f):
-                sysroot = macosx_sdk_root()
-                f = os.path.join(sysroot, f[1:])
-
-        if os.path.exists(f) and not db_incs:
-            data = open(f).read()
-            m = re.search(r"#s*define\s+HASHVERSION\s+2\s*", data)
-            if m is not None:
-                # bingo - old version used hash file format version 2
-                ### XXX this should be fixed to not be platform-dependent
-                ### but I don't have direct access to an osf1 platform and
-                ### seemed to be muffing the search somehow
-                libraries = host_platform == "osf1" and ['db'] or None
-                if libraries is not None:
-                    exts.append(Extension('bsddb185', ['bsddbmodule.c'],
-                                          libraries=libraries))
-                else:
-                    exts.append(Extension('bsddb185', ['bsddbmodule.c']))
-            else:
-                missing.append('bsddb185')
-        else:
-            missing.append('bsddb185')
-
-        dbm_order = ['gdbm']
-        # The standard Unix dbm module:
-        if host_platform not in ['cygwin']:
-            config_args = [arg.strip("'")
-                           for arg in sysconfig.get_config_var("CONFIG_ARGS").split()]
-            dbm_args = [arg for arg in config_args
-                        if arg.startswith('--with-dbmliborder=')]
-            if dbm_args:
-                dbm_order = [arg.split('=')[-1] for arg in dbm_args][-1].split(":")
-            else:
-                dbm_order = "ndbm:gdbm:bdb".split(":")
-            dbmext = None
-            for cand in dbm_order:
-                if cand == "ndbm":
-                    if find_file("ndbm.h", inc_dirs, []) is not None:
-                        # Some systems have -lndbm, others have -lgdbm_compat,
-                        # others don't have either
-                        if self.compiler.find_library_file(lib_dirs,
-                                                               'ndbm'):
-                            ndbm_libs = ['ndbm']
-                        elif self.compiler.find_library_file(lib_dirs,
-                                                             'gdbm_compat'):
-                            ndbm_libs = ['gdbm_compat']
-                        else:
-                            ndbm_libs = []
-                        print "building dbm using ndbm"
-                        dbmext = Extension('dbm', ['dbmmodule.c'],
-                                           define_macros=[
-                                               ('HAVE_NDBM_H',None),
-                                               ],
-                                           libraries=ndbm_libs)
-                        break
-
-                elif cand == "gdbm":
-                    if self.compiler.find_library_file(lib_dirs, 'gdbm'):
-                        gdbm_libs = ['gdbm']
-                        if self.compiler.find_library_file(lib_dirs,
-                                                               'gdbm_compat'):
-                            gdbm_libs.append('gdbm_compat')
-                        if find_file("gdbm/ndbm.h", inc_dirs, []) is not None:
-                            print "building dbm using gdbm"
-                            dbmext = Extension(
-                                'dbm', ['dbmmodule.c'],
-                                define_macros=[
-                                    ('HAVE_GDBM_NDBM_H', None),
-                                    ],
-                                libraries = gdbm_libs)
-                            break
-                        if find_file("gdbm-ndbm.h", inc_dirs, []) is not None:
-                            print "building dbm using gdbm"
-                            dbmext = Extension(
-                                'dbm', ['dbmmodule.c'],
-                                define_macros=[
-                                    ('HAVE_GDBM_DASH_NDBM_H', None),
-                                    ],
-                                libraries = gdbm_libs)
-                            break
-                elif cand == "bdb":
-                    if db_incs is not None:
-                        print "building dbm using bdb"
-                        dbmext = Extension('dbm', ['dbmmodule.c'],
-                                           library_dirs=dblib_dir,
-                                           runtime_library_dirs=dblib_dir,
-                                           include_dirs=db_incs,
-                                           define_macros=[
-                                               ('HAVE_BERKDB_H', None),
-                                               ('DB_DBM_HSEARCH', None),
-                                               ],
-                                           libraries=dblibs)
-                        break
-            if dbmext is not None:
-                exts.append(dbmext)
-            else:
-                missing.append('dbm')
-
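
For reference, the --with-dbmliborder parse above as a standalone sketch; dbm_order_from_config_args is an illustrative name and the CONFIG_ARGS strings are hypothetical:

    def dbm_order_from_config_args(config_args):
        # Honour the last --with-dbmliborder=... argument, else fall back
        # to the default 'ndbm:gdbm:bdb' search order.
        args = [a.strip("'") for a in config_args.split()]
        dbm_args = [a for a in args if a.startswith('--with-dbmliborder=')]
        if dbm_args:
            return dbm_args[-1].split('=')[-1].split(':')
        return ['ndbm', 'gdbm', 'bdb']

    print(dbm_order_from_config_args(
        "'--prefix=/opt/py' '--with-dbmliborder=gdbm:ndbm'"))   # ['gdbm', 'ndbm']
    print(dbm_order_from_config_args("'--prefix=/opt/py'"))     # default order
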
-        # Anthony Baxter's gdbm module.  GNU dbm(3) will require -lgdbm:
-        if ('gdbm' in dbm_order and
-            self.compiler.find_library_file(lib_dirs, 'gdbm')):
-            exts.append( Extension('gdbm', ['gdbmmodule.c'],
-                                   libraries = ['gdbm'] ) )
-        else:
-            missing.append('gdbm')
-
-        # Unix-only modules
-        if host_platform not in ['win32']:
-            # Steen Lumholt's termios module
-            exts.append( Extension('termios', ['termios.c']) )
-            # Jeremy Hylton's rlimit interface
-            if host_platform not in ['atheos']:
-                exts.append( Extension('resource', ['resource.c']) )
-            else:
-                missing.append('resource')
-
-            # Sun yellow pages. Some systems have the functions in libc.
-            if (host_platform not in ['cygwin', 'atheos', 'qnx6'] and
-                find_file('rpcsvc/yp_prot.h', inc_dirs, []) is not None):
-                if (self.compiler.find_library_file(lib_dirs, 'nsl')):
-                    libs = ['nsl']
-                else:
-                    libs = []
-                exts.append( Extension('nis', ['nismodule.c'],
-                                       libraries = libs) )
-            else:
-                missing.append('nis')
-        else:
-            missing.extend(['nis', 'resource', 'termios'])
-
-        # Curses support, requiring the System V version of curses, often
-        # provided by the ncurses library.
-        panel_library = 'panel'
-        curses_incs = None
-        if curses_library.startswith('ncurses'):
-            if curses_library == 'ncursesw':
-                # Bug 1464056: If _curses.so links with ncursesw,
-                # _curses_panel.so must link with panelw.
-                panel_library = 'panelw'
-            curses_libs = [curses_library]
-            curses_incs = find_file('curses.h', inc_dirs,
-                                    [os.path.join(d, 'ncursesw') for d in inc_dirs])
-            exts.append( Extension('_curses', ['_cursesmodule.c'],
-                                   include_dirs = curses_incs,
-                                   libraries = curses_libs) )
-        elif curses_library == 'curses' and host_platform != 'darwin':
-            # OSX has an old Berkeley curses, not good enough for
-            # the _curses module.
-            if (self.compiler.find_library_file(lib_dirs, 'terminfo')):
-                curses_libs = ['curses', 'terminfo']
-            elif (self.compiler.find_library_file(lib_dirs, 'termcap')):
-                curses_libs = ['curses', 'termcap']
-            else:
-                curses_libs = ['curses']
-
-            exts.append( Extension('_curses', ['_cursesmodule.c'],
-                                   libraries = curses_libs) )
-        else:
-            missing.append('_curses')
-
-        # If the curses module is enabled, check for the panel module
-        if (module_enabled(exts, '_curses') and
-            self.compiler.find_library_file(lib_dirs, panel_library)):
-            exts.append( Extension('_curses_panel', ['_curses_panel.c'],
-                                   include_dirs = curses_incs,
-                                   libraries = [panel_library] + curses_libs) )
-        else:
-            missing.append('_curses_panel')
-
-        # Andrew Kuchling's zlib module.  Note that some versions of zlib
-        # 1.1.3 have security problems.  See CERT Advisory CA-2002-07:
-        # http://www.cert.org/advisories/CA-2002-07.html
-        #
-        # zlib 1.1.4 is fixed, but at least one vendor (RedHat) has decided to
-        # patch its zlib 1.1.3 package instead of upgrading to 1.1.4.  For
-        # now, we still accept 1.1.3, because we think it's difficult to
-        # exploit this in Python, and we'd rather make it RedHat's problem
-        # than our problem <wink>.
-        #
-        # You can upgrade zlib to version 1.1.4 yourself by going to
-        # http://www.gzip.org/zlib/
-        zlib_inc = find_file('zlib.h', [], inc_dirs)
-        have_zlib = False
-        if zlib_inc is not None:
-            zlib_h = zlib_inc[0] + '/zlib.h'
-            version = '"0.0.0"'
-            version_req = '"1.1.3"'
-            if host_platform == 'darwin' and is_macosx_sdk_path(zlib_h):
-                zlib_h = os.path.join(macosx_sdk_root(), zlib_h[1:])
-            fp = open(zlib_h)
-            while 1:
-                line = fp.readline()
-                if not line:
-                    break
-                if line.startswith('#define ZLIB_VERSION'):
-                    version = line.split()[2]
-                    break
-            if version >= version_req:
-                if (self.compiler.find_library_file(lib_dirs, 'z')):
-                    if host_platform == "darwin":
-                        zlib_extra_link_args = ('-Wl,-search_paths_first',)
-                    else:
-                        zlib_extra_link_args = ()
-                    exts.append( Extension('zlib', ['zlibmodule.c'],
-                                           libraries = ['z'],
-                                           extra_link_args = zlib_extra_link_args))
-                    have_zlib = True
-                else:
-                    missing.append('zlib')
-            else:
-                missing.append('zlib')
-        else:
-            missing.append('zlib')
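-        # Reading the version check above: a zlib.h line of the form
-        #     #define ZLIB_VERSION "1.2.8"
-        # makes line.split()[2] yield '"1.2.8"' with the quotes included,
-        # which is why version_req is written as '"1.1.3"' and the two
-        # quoted strings are compared directly; a plain string comparison
-        # is adequate for the 1.x versions involved here.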
-
-        # Helper module for various ascii-encoders.  Uses zlib for an optimized
-        # crc32 if we have it.  Otherwise binascii uses its own.
-        if have_zlib:
-            extra_compile_args = ['-DUSE_ZLIB_CRC32']
-            libraries = ['z']
-            extra_link_args = zlib_extra_link_args
-        else:
-            extra_compile_args = []
-            libraries = []
-            extra_link_args = []
-        exts.append( Extension('binascii', ['binascii.c'],
-                               extra_compile_args = extra_compile_args,
-                               libraries = libraries,
-                               extra_link_args = extra_link_args) )
-
-        # Gustavo Niemeyer's bz2 module.
-        if (self.compiler.find_library_file(lib_dirs, 'bz2')):
-            if host_platform == "darwin":
-                bz2_extra_link_args = ('-Wl,-search_paths_first',)
-            else:
-                bz2_extra_link_args = ()
-            exts.append( Extension('bz2', ['bz2module.c'],
-                                   libraries = ['bz2'],
-                                   extra_link_args = bz2_extra_link_args) )
-        else:
-            missing.append('bz2')
-
-        # Interface to the Expat XML parser
-        #
-        # Expat was written by James Clark and is now maintained by a group of
-        # developers on SourceForge; see www.libexpat.org for more information.
-        # The pyexpat module was written by Paul Prescod after a prototype by
-        # Jack Jansen.  The Expat source is included in Modules/expat/.  Usage
-        # of a system shared libexpat.so is possible with --with-system-expat
-        # configure option.
-        #
-        # More information on Expat can be found at www.libexpat.org.
-        #
-        if '--with-system-expat' in sysconfig.get_config_var("CONFIG_ARGS"):
-            expat_inc = []
-            define_macros = []
-            expat_lib = ['expat']
-            expat_sources = []
-            expat_depends = []
-        else:
-            expat_inc = [os.path.join(os.getcwd(), srcdir, 'Modules', 'expat')]
-            define_macros = [
-                ('HAVE_EXPAT_CONFIG_H', '1'),
-            ]
-            expat_lib = []
-            expat_sources = ['expat/xmlparse.c',
-                             'expat/xmlrole.c',
-                             'expat/xmltok.c']
-            expat_depends = ['expat/ascii.h',
-                             'expat/asciitab.h',
-                             'expat/expat.h',
-                             'expat/expat_config.h',
-                             'expat/expat_external.h',
-                             'expat/internal.h',
-                             'expat/latin1tab.h',
-                             'expat/utf8tab.h',
-                             'expat/xmlrole.h',
-                             'expat/xmltok.h',
-                             'expat/xmltok_impl.h'
-                             ]
-
-        exts.append(Extension('pyexpat',
-                              define_macros = define_macros,
-                              include_dirs = expat_inc,
-                              libraries = expat_lib,
-                              sources = ['pyexpat.c'] + expat_sources,
-                              depends = expat_depends,
-                              ))
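-        # For example, a Python configured with
-        #     ./configure --with-system-expat
-        # takes the first branch above and links pyexpat against the system
-        # libexpat, while the default build compiles the bundled expat
-        # sources from Modules/expat/ with HAVE_EXPAT_CONFIG_H defined.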
-
-        # Fredrik Lundh's cElementTree module.  Note that this also
-        # uses expat (via the CAPI hook in pyexpat).
-
-        if os.path.isfile(os.path.join(srcdir, 'Modules', '_elementtree.c')):
-            define_macros.append(('USE_PYEXPAT_CAPI', None))
-            exts.append(Extension('_elementtree',
-                                  define_macros = define_macros,
-                                  include_dirs = expat_inc,
-                                  libraries = expat_lib,
-                                  sources = ['_elementtree.c'],
-                                  depends = ['pyexpat.c'] + expat_sources +
-                                      expat_depends,
-                                  ))
-        else:
-            missing.append('_elementtree')
-
-        # Hye-Shik Chang's CJKCodecs modules.
-        if have_unicode:
-            exts.append(Extension('_multibytecodec',
-                                  ['cjkcodecs/multibytecodec.c']))
-            for loc in ('kr', 'jp', 'cn', 'tw', 'hk', 'iso2022'):
-                exts.append(Extension('_codecs_%s' % loc,
-                                      ['cjkcodecs/_codecs_%s.c' % loc]))
-        else:
-            missing.append('_multibytecodec')
-            for loc in ('kr', 'jp', 'cn', 'tw', 'hk', 'iso2022'):
-                missing.append('_codecs_%s' % loc)
-
-        # Dynamic loading module
-        if sys.maxint == 0x7fffffff:
-            # This requires sizeof(int) == sizeof(long) == sizeof(char*)
-            dl_inc = find_file('dlfcn.h', [], inc_dirs)
-            if (dl_inc is not None) and (host_platform not in ['atheos']):
-                exts.append( Extension('dl', ['dlmodule.c']) )
-            else:
-                missing.append('dl')
-        else:
-            missing.append('dl')
-
-        # Thomas Heller's _ctypes module
-        self.detect_ctypes(inc_dirs, lib_dirs)
-
-        # Richard Oudkerk's multiprocessing module
-        if host_platform == 'win32':             # Windows
-            macros = dict()
-            libraries = ['ws2_32']
-
-        elif host_platform == 'darwin':          # Mac OSX
-            macros = dict()
-            libraries = []
-
-        elif host_platform == 'cygwin':          # Cygwin
-            macros = dict()
-            libraries = []
-
-        elif host_platform in ('freebsd4', 'freebsd5', 'freebsd6', 'freebsd7', 'freebsd8'):
-            # FreeBSD's P1003.1b semaphore support is very experimental
-            # and has many known problems. (as of June 2008)
-            macros = dict()
-            libraries = []
-
-        elif host_platform.startswith('openbsd'):
-            macros = dict()
-            libraries = []
-
-        elif host_platform.startswith('netbsd'):
-            macros = dict()
-            libraries = []
-
-        else:                                   # Linux and other unices
-            macros = dict()
-            libraries = ['rt']
-
-        if host_platform == 'win32':
-            multiprocessing_srcs = [ '_multiprocessing/multiprocessing.c',
-                                     '_multiprocessing/semaphore.c',
-                                     '_multiprocessing/pipe_connection.c',
-                                     '_multiprocessing/socket_connection.c',
-                                     '_multiprocessing/win32_functions.c'
-                                   ]
-
-        else:
-            multiprocessing_srcs = [ '_multiprocessing/multiprocessing.c',
-                                     '_multiprocessing/socket_connection.c'
-                                   ]
-            if (sysconfig.get_config_var('HAVE_SEM_OPEN') and not
-                sysconfig.get_config_var('POSIX_SEMAPHORES_NOT_ENABLED')):
-                multiprocessing_srcs.append('_multiprocessing/semaphore.c')
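-        # On a typical Linux host the above resolves to
-        #     libraries = ['rt']
-        #     multiprocessing_srcs = ['_multiprocessing/multiprocessing.c',
-        #                             '_multiprocessing/socket_connection.c',
-        #                             '_multiprocessing/semaphore.c']
-        # with semaphore.c included only when HAVE_SEM_OPEN is set and
-        # POSIX_SEMAPHORES_NOT_ENABLED is not.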
-
-        if sysconfig.get_config_var('WITH_THREAD'):
-            exts.append ( Extension('_multiprocessing', multiprocessing_srcs,
-                                    define_macros=macros.items(),
-                                    include_dirs=["Modules/_multiprocessing"]))
-        else:
-            missing.append('_multiprocessing')
-
-        # End multiprocessing
-
-
-        # Platform-specific libraries
-        if host_platform == 'linux2':
-            # Linux-specific modules
-            exts.append( Extension('linuxaudiodev', ['linuxaudiodev.c']) )
-        else:
-            missing.append('linuxaudiodev')
-
-        if (host_platform in ('linux2', 'freebsd4', 'freebsd5', 'freebsd6',
-                        'freebsd7', 'freebsd8')
-            or host_platform.startswith("gnukfreebsd")):
-            exts.append( Extension('ossaudiodev', ['ossaudiodev.c']) )
-        else:
-            missing.append('ossaudiodev')
-
-        if host_platform == 'sunos5':
-            # SunOS specific modules
-            exts.append( Extension('sunaudiodev', ['sunaudiodev.c']) )
-        else:
-            missing.append('sunaudiodev')
-
-        if host_platform == 'darwin':
-            # _scproxy
-            exts.append(Extension("_scproxy", [os.path.join(srcdir, "Mac/Modules/_scproxy.c")],
-                extra_link_args= [
-                    '-framework', 'SystemConfiguration',
-                    '-framework', 'CoreFoundation'
-                ]))
-
-
-        if host_platform == 'darwin' and ("--disable-toolbox-glue" not in
-                sysconfig.get_config_var("CONFIG_ARGS")):
-
-            if int(os.uname()[2].split('.')[0]) >= 8:
-                # We're on Mac OS X 10.4 or later, so the compiler should
-                # support '-Wno-deprecated-declarations'. This will
-                # suppress deprecation warnings for the Carbon extensions,
-                # which wrap the Carbon APIs, including the parts that
-                # are deprecated.
-                carbon_extra_compile_args = ['-Wno-deprecated-declarations']
-            else:
-                carbon_extra_compile_args = []
-
-            # Mac OS X specific modules.
-            def macSrcExists(name1, name2=''):
-                if not name1:
-                    return None
-                names = (name1,)
-                if name2:
-                    names = (name1, name2)
-                path = os.path.join(srcdir, 'Mac', 'Modules', *names)
-                return os.path.exists(path)
-
-            def addMacExtension(name, kwds, extra_srcs=[]):
-                dirname = ''
-                if name[0] == '_':
-                    dirname = name[1:].lower()
-                cname = name + '.c'
-                cmodulename = name + 'module.c'
-                # Check for NNN.c, NNNmodule.c, _nnn/NNN.c, _nnn/NNNmodule.c
-                if macSrcExists(cname):
-                    srcs = [cname]
-                elif macSrcExists(cmodulename):
-                    srcs = [cmodulename]
-                elif macSrcExists(dirname, cname):
-                    # XXX(nnorwitz): If all the names ended with module, we
-                    # wouldn't need this condition.  ibcarbon is the only one.
-                    srcs = [os.path.join(dirname, cname)]
-                elif macSrcExists(dirname, cmodulename):
-                    srcs = [os.path.join(dirname, cmodulename)]
-                else:
-                    raise RuntimeError("%s not found" % name)
-
-                # Here's the whole point:  add the extension with sources
-                exts.append(Extension(name, srcs + extra_srcs, **kwds))
-
-            # Core Foundation
-            core_kwds = {'extra_compile_args': carbon_extra_compile_args,
-                         'extra_link_args': ['-framework', 'CoreFoundation'],
-                        }
-            addMacExtension('_CF', core_kwds, ['cf/pycfbridge.c'])
-            addMacExtension('autoGIL', core_kwds)
-
-
-
-            # Carbon
-            carbon_kwds = {'extra_compile_args': carbon_extra_compile_args,
-                           'extra_link_args': ['-framework', 'Carbon'],
-                          }
-            CARBON_EXTS = ['ColorPicker', 'gestalt', 'MacOS', 'Nav',
-                           'OSATerminology', 'icglue',
-                           # All these are in subdirs
-                           '_AE', '_AH', '_App', '_CarbonEvt', '_Cm', '_Ctl',
-                           '_Dlg', '_Drag', '_Evt', '_File', '_Folder', '_Fm',
-                           '_Help', '_Icn', '_IBCarbon', '_List',
-                           '_Menu', '_Mlte', '_OSA', '_Res', '_Qd', '_Qdoffs',
-                           '_Scrap', '_Snd', '_TE',
-                          ]
-            for name in CARBON_EXTS:
-                addMacExtension(name, carbon_kwds)
-
-            # Workaround for a bug in the version of gcc shipped with Xcode 3.
-            # The _Win extension should build just like the other Carbon extensions, but
-            # this actually results in a hard crash of the linker.
-            #
-            if '-arch ppc64' in cflags and '-arch ppc' in cflags:
-                win_kwds = {'extra_compile_args': carbon_extra_compile_args + ['-arch', 'i386', '-arch', 'ppc'],
-                               'extra_link_args': ['-framework', 'Carbon', '-arch', 'i386', '-arch', 'ppc'],
-                           }
-                addMacExtension('_Win', win_kwds)
-            else:
-                addMacExtension('_Win', carbon_kwds)
-
-
-            # Application Services & QuickTime
-            app_kwds = {'extra_compile_args': carbon_extra_compile_args,
-                        'extra_link_args': ['-framework','ApplicationServices'],
-                       }
-            addMacExtension('_Launch', app_kwds)
-            addMacExtension('_CG', app_kwds)
-
-            exts.append( Extension('_Qt', ['qt/_Qtmodule.c'],
-                        extra_compile_args=carbon_extra_compile_args,
-                        extra_link_args=['-framework', 'QuickTime',
-                                     '-framework', 'Carbon']) )
-
-
-        self.extensions.extend(exts)
-
-        # Call the method for detecting whether _tkinter can be compiled
-        self.detect_tkinter(inc_dirs, lib_dirs)
-
-        if '_tkinter' not in [e.name for e in self.extensions]:
-            missing.append('_tkinter')
-
-##         # Uncomment these lines if you want to play with xxmodule.c
-##         ext = Extension('xx', ['xxmodule.c'])
-##         self.extensions.append(ext)
-
-        return missing
-
-    def detect_tkinter_explicitly(self):
-        # Build _tkinter using explicit locations for Tcl/Tk.
-        #
-        # This is enabled when both arguments are given to ./configure:
-        #
-        #     --with-tcltk-includes="-I/path/to/tclincludes \
-        #                            -I/path/to/tkincludes"
-        #     --with-tcltk-libs="-L/path/to/tcllibs -ltclm.n \
-        #                        -L/path/to/tklibs -ltkm.n"
-        #
-        # These values can also be specified or overridden via make:
-        #    make TCLTK_INCLUDES="..." TCLTK_LIBS="..."
-        #
-        # This can be useful for building and testing tkinter with multiple
-        # versions of Tcl/Tk.  Note that a build of Tk depends on a particular
-        # build of Tcl so you need to specify both arguments and use care when
-        # overriding.
-
-        # The _TCLTK variables are created in the Makefile sharedmods target.
-        tcltk_includes = os.environ.get('_TCLTK_INCLUDES')
-        tcltk_libs = os.environ.get('_TCLTK_LIBS')
-        if not (tcltk_includes and tcltk_libs):
-            # Resume default configuration search.
-            return 0
-
-        extra_compile_args = tcltk_includes.split()
-        extra_link_args = tcltk_libs.split()
-        ext = Extension('_tkinter', ['_tkinter.c', 'tkappinit.c'],
-                        define_macros=[('WITH_APPINIT', 1)],
-                        extra_compile_args = extra_compile_args,
-                        extra_link_args = extra_link_args,
-                        )
-        self.extensions.append(ext)
-        return 1
-
-    def detect_tkinter_darwin(self, inc_dirs, lib_dirs):
-        # The _tkinter module, using frameworks. Since frameworks are quite
-        # different, the UNIX search logic is not sharable.
-        from os.path import join, exists
-        framework_dirs = [
-            '/Library/Frameworks',
-            '/System/Library/Frameworks/',
-            join(os.getenv('HOME'), 'Library/Frameworks')
-        ]
-
-        sysroot = macosx_sdk_root()
-
-        # Find the directory that contains the Tcl.framework and Tk.framework
-        # bundles.
-        # XXX distutils should support -F!
-        for F in framework_dirs:
-            # both Tcl.framework and Tk.framework should be present
-
-
-            for fw in 'Tcl', 'Tk':
-                if is_macosx_sdk_path(F):
-                    if not exists(join(sysroot, F[1:], fw + '.framework')):
-                        break
-                else:
-                    if not exists(join(F, fw + '.framework')):
-                        break
-            else:
-                # ok, F is now a directory with both frameworks. Continue
-                # building.
-                break
-        else:
-            # Tk and Tcl frameworks not found. Normal "unix" tkinter search
-            # will now resume.
-            return 0
-
-        # For 8.4a2, we must add -I options that point inside the Tcl and Tk
-        # frameworks. In later releases we should hopefully be able to pass
-        # the -F option to gcc, which specifies a framework lookup path.
-        #
-        include_dirs = [
-            join(F, fw + '.framework', H)
-            for fw in 'Tcl', 'Tk'
-            for H in 'Headers', 'Versions/Current/PrivateHeaders'
-        ]
-
-        # For 8.4a2, the X11 headers are not included. Rather than include a
-        # complicated search, this is a hard-coded path. It could bail out
-        # if X11 libs are not found...
-        include_dirs.append('/usr/X11R6/include')
-        frameworks = ['-framework', 'Tcl', '-framework', 'Tk']
-
-        # None of the existing framework builds of Tcl/Tk support 64-bit
-        # architectures.
-        cflags = sysconfig.get_config_vars('CFLAGS')[0]
-        archs = re.findall(r'-arch\s+(\w+)', cflags)
-
-        if is_macosx_sdk_path(F):
-            fp = os.popen("file %s/Tk.framework/Tk | grep 'for architecture'"%(os.path.join(sysroot, F[1:]),))
-        else:
-            fp = os.popen("file %s/Tk.framework/Tk | grep 'for architecture'"%(F,))
-
-        detected_archs = []
-        for ln in fp:
-            a = ln.split()[-1]
-            if a in archs:
-                detected_archs.append(ln.split()[-1])
-        fp.close()
-
-        for a in detected_archs:
-            frameworks.append('-arch')
-            frameworks.append(a)
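-        # For example, 'file .../Tk.framework/Tk' typically reports lines
-        # like 'Tk (for architecture i386): Mach-O ...'; only architectures
-        # that also appear as -arch flags in CFLAGS survive into
-        # detected_archs and are forwarded as '-arch <name>' link args.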
-
-        ext = Extension('_tkinter', ['_tkinter.c', 'tkappinit.c'],
-                        define_macros=[('WITH_APPINIT', 1)],
-                        include_dirs = include_dirs,
-                        libraries = [],
-                        extra_compile_args = frameworks[2:],
-                        extra_link_args = frameworks,
-                        )
-        self.extensions.append(ext)
-        return 1
-
-    def detect_tkinter(self, inc_dirs, lib_dirs):
-        # The _tkinter module.
-
-        # Check whether --with-tcltk-includes and --with-tcltk-libs were
-        # configured or passed into the make target.  If so, use these values
-        # to build tkinter and bypass the searches for Tcl and Tk in standard
-        # locations.
-        if self.detect_tkinter_explicitly():
-            return
-
-        # Rather than complicate the code below, detecting and building
-        # AquaTk is a separate method. Only one Tkinter will be built on
-        # Darwin - either AquaTk, if it is found, or X11 based Tk.
-        if (host_platform == 'darwin' and
-            self.detect_tkinter_darwin(inc_dirs, lib_dirs)):
-            return
-
-        # Assume we haven't found any of the libraries or include files
-        # The versions with dots are used on Unix, and the versions without
-        # dots on Windows, for detection by cygwin.
-        tcllib = tklib = tcl_includes = tk_includes = None
-        for version in ['8.6', '86', '8.5', '85', '8.4', '84', '8.3', '83',
-                        '8.2', '82', '8.1', '81', '8.0', '80']:
-            tklib = self.compiler.find_library_file(lib_dirs,
-                                                        'tk' + version)
-            tcllib = self.compiler.find_library_file(lib_dirs,
-                                                         'tcl' + version)
-            if tklib and tcllib:
-                # Exit the loop when we've found the Tcl/Tk libraries
-                break
-
-        # Now check for the header files
-        if tklib and tcllib:
-            # Check for the include files on Debian and {Free,Open}BSD, where
-            # they're put in /usr/include/{tcl,tk}X.Y
-            dotversion = version
-            if '.' not in dotversion and "bsd" in host_platform.lower():
-                # OpenBSD and FreeBSD use Tcl/Tk library names like libtcl83.a,
-                # but the include subdirs are named like .../include/tcl8.3.
-                dotversion = dotversion[:-1] + '.' + dotversion[-1]
-            tcl_include_sub = []
-            tk_include_sub = []
-            for dir in inc_dirs:
-                tcl_include_sub += [dir + os.sep + "tcl" + dotversion]
-                tk_include_sub += [dir + os.sep + "tk" + dotversion]
-            tk_include_sub += tcl_include_sub
-            tcl_includes = find_file('tcl.h', inc_dirs, tcl_include_sub)
-            tk_includes = find_file('tk.h', inc_dirs, tk_include_sub)
-
-        if (tcllib is None or tklib is None or
-            tcl_includes is None or tk_includes is None):
-            self.announce("INFO: Can't locate Tcl/Tk libs and/or headers", 2)
-            return
-
-        # OK... everything seems to be present for Tcl/Tk.
-
-        include_dirs = [] ; libs = [] ; defs = [] ; added_lib_dirs = []
-        for dir in tcl_includes + tk_includes:
-            if dir not in include_dirs:
-                include_dirs.append(dir)
-
-        # Check for various platform-specific directories
-        if host_platform == 'sunos5':
-            include_dirs.append('/usr/openwin/include')
-            added_lib_dirs.append('/usr/openwin/lib')
-        elif os.path.exists('/usr/X11R6/include'):
-            include_dirs.append('/usr/X11R6/include')
-            added_lib_dirs.append('/usr/X11R6/lib64')
-            added_lib_dirs.append('/usr/X11R6/lib')
-        elif os.path.exists('/usr/X11R5/include'):
-            include_dirs.append('/usr/X11R5/include')
-            added_lib_dirs.append('/usr/X11R5/lib')
-        else:
-            # Assume default location for X11
-            include_dirs.append('/usr/X11/include')
-            added_lib_dirs.append('/usr/X11/lib')
-
-        # If Cygwin, then verify that X is installed before proceeding
-        if host_platform == 'cygwin':
-            x11_inc = find_file('X11/Xlib.h', [], include_dirs)
-            if x11_inc is None:
-                return
-
-        # Check for BLT extension
-        if self.compiler.find_library_file(lib_dirs + added_lib_dirs,
-                                               'BLT8.0'):
-            defs.append( ('WITH_BLT', 1) )
-            libs.append('BLT8.0')
-        elif self.compiler.find_library_file(lib_dirs + added_lib_dirs,
-                                                'BLT'):
-            defs.append( ('WITH_BLT', 1) )
-            libs.append('BLT')
-
-        # Add the Tcl/Tk libraries
-        libs.append('tk'+ version)
-        libs.append('tcl'+ version)
-
-        if host_platform in ['aix3', 'aix4']:
-            libs.append('ld')
-
-        # Finally, link with the X11 libraries (not appropriate on cygwin)
-        if host_platform != "cygwin":
-            libs.append('X11')
-
-        ext = Extension('_tkinter', ['_tkinter.c', 'tkappinit.c'],
-                        define_macros=[('WITH_APPINIT', 1)] + defs,
-                        include_dirs = include_dirs,
-                        libraries = libs,
-                        library_dirs = added_lib_dirs,
-                        )
-        self.extensions.append(ext)
-
-        # XXX handle these, but how to detect?
-        # *** Uncomment and edit for PIL (TkImaging) extension only:
-        #       -DWITH_PIL -I../Extensions/Imaging/libImaging  tkImaging.c \
-        # *** Uncomment and edit for TOGL extension only:
-        #       -DWITH_TOGL togl.c \
-        # *** Uncomment these for TOGL extension only:
-        #       -lGL -lGLU -lXext -lXmu \
-
-    def configure_ctypes_darwin(self, ext):
-        # Darwin (OS X) uses preconfigured files, in
-        # the Modules/_ctypes/libffi_osx directory.
-        srcdir = sysconfig.get_config_var('srcdir')
-        ffi_srcdir = os.path.abspath(os.path.join(srcdir, 'Modules',
-                                                  '_ctypes', 'libffi_osx'))
-        sources = [os.path.join(ffi_srcdir, p)
-                   for p in ['ffi.c',
-                             'x86/darwin64.S',
-                             'x86/x86-darwin.S',
-                             'x86/x86-ffi_darwin.c',
-                             'x86/x86-ffi64.c',
-                             'powerpc/ppc-darwin.S',
-                             'powerpc/ppc-darwin_closure.S',
-                             'powerpc/ppc-ffi_darwin.c',
-                             'powerpc/ppc64-darwin_closure.S',
-                             ]]
-
-        # Add .S (preprocessed assembly) to C compiler source extensions.
-        self.compiler.src_extensions.append('.S')
-
-        include_dirs = [os.path.join(ffi_srcdir, 'include'),
-                        os.path.join(ffi_srcdir, 'powerpc')]
-        ext.include_dirs.extend(include_dirs)
-        ext.sources.extend(sources)
-        return True
-
-    def configure_ctypes(self, ext):
-        if not self.use_system_libffi:
-            if host_platform == 'darwin':
-                return self.configure_ctypes_darwin(ext)
-
-            srcdir = sysconfig.get_config_var('srcdir')
-            ffi_builddir = os.path.join(self.build_temp, 'libffi')
-            ffi_srcdir = os.path.abspath(os.path.join(srcdir, 'Modules',
-                                         '_ctypes', 'libffi'))
-            ffi_configfile = os.path.join(ffi_builddir, 'fficonfig.py')
-
-            from distutils.dep_util import newer_group
-
-            config_sources = [os.path.join(ffi_srcdir, fname)
-                              for fname in os.listdir(ffi_srcdir)
-                              if os.path.isfile(os.path.join(ffi_srcdir, fname))]
-            if self.force or newer_group(config_sources,
-                                         ffi_configfile):
-                from distutils.dir_util import mkpath
-                mkpath(ffi_builddir)
-                config_args = [arg for arg in sysconfig.get_config_var("CONFIG_ARGS").split()
-                               if (('--host=' in arg) or ('--build=' in arg))]
-                if not self.verbose:
-                    config_args.append("-q")
-
-                # Pass empty CFLAGS because we'll just append the resulting
-                # CFLAGS to Python's; -g or -O2 is to be avoided.
-                cmd = "cd %s && env CFLAGS='' '%s/configure' %s" \
-                      % (ffi_builddir, ffi_srcdir, " ".join(config_args))
-
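-                # e.g. the command expands to something like
-                #     cd build/temp.linux-x86_64-2.7/libffi && env CFLAGS='' \
-                #         '<srcdir>/Modules/_ctypes/libffi/configure' --host=... -q
-                # with only the --host=/--build= arguments forwarded from
-                # Python's own configure line ('-q' is added when not verbose).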
-                res = os.system(cmd)
-                if res or not os.path.exists(ffi_configfile):
-                    print "Failed to configure _ctypes module"
-                    return False
-
-            fficonfig = {}
-            with open(ffi_configfile) as f:
-                exec f in fficonfig
-
-            # Add .S (preprocessed assembly) to C compiler source extensions.
-            self.compiler.src_extensions.append('.S')
-
-            include_dirs = [os.path.join(ffi_builddir, 'include'),
-                            ffi_builddir,
-                            os.path.join(ffi_srcdir, 'src')]
-            extra_compile_args = fficonfig['ffi_cflags'].split()
-
-            ext.sources.extend(os.path.join(ffi_srcdir, f) for f in
-                               fficonfig['ffi_sources'])
-            ext.include_dirs.extend(include_dirs)
-            ext.extra_compile_args.extend(extra_compile_args)
-        return True
-
-    def detect_ctypes(self, inc_dirs, lib_dirs):
-        self.use_system_libffi = False
-        include_dirs = []
-        extra_compile_args = []
-        extra_link_args = []
-        sources = ['_ctypes/_ctypes.c',
-                   '_ctypes/callbacks.c',
-                   '_ctypes/callproc.c',
-                   '_ctypes/stgdict.c',
-                   '_ctypes/cfield.c']
-        depends = ['_ctypes/ctypes.h']
-
-        if host_platform == 'darwin':
-            sources.append('_ctypes/malloc_closure.c')
-            sources.append('_ctypes/darwin/dlfcn_simple.c')
-            extra_compile_args.append('-DMACOSX')
-            include_dirs.append('_ctypes/darwin')
-# XXX Is this still needed?
-##            extra_link_args.extend(['-read_only_relocs', 'warning'])
-
-        elif host_platform == 'sunos5':
-            # XXX This shouldn't be necessary; it appears that some
-            # of the assembler code is non-PIC (i.e. it has relocations
-            # when it shouldn't). The proper fix would be to rewrite
-            # the assembler code to be PIC.
-            # This only works with GCC; the Sun compiler likely refuses
-            # this option. If you want to compile ctypes with the Sun
-            # compiler, please research a proper solution, instead of
-            # finding some -z option for the Sun compiler.
-            extra_link_args.append('-mimpure-text')
-
-        elif host_platform.startswith('hp-ux'):
-            extra_link_args.append('-fPIC')
-
-        ext = Extension('_ctypes',
-                        include_dirs=include_dirs,
-                        extra_compile_args=extra_compile_args,
-                        extra_link_args=extra_link_args,
-                        libraries=[],
-                        sources=sources,
-                        depends=depends)
-        ext_test = Extension('_ctypes_test',
-                             sources=['_ctypes/_ctypes_test.c'])
-        self.extensions.extend([ext, ext_test])
-
-        if '--with-system-ffi' not in sysconfig.get_config_var("CONFIG_ARGS"):
-            return
-
-        if host_platform == 'darwin':
-            # OS X 10.5 comes with libffi.dylib; the include files are
-            # in /usr/include/ffi
-            inc_dirs.append('/usr/include/ffi')
-
-        ffi_inc = [sysconfig.get_config_var("LIBFFI_INCLUDEDIR")]
-        if not ffi_inc or ffi_inc[0] == '':
-            ffi_inc = find_file('ffi.h', [], inc_dirs)
-        if ffi_inc is not None:
-            ffi_h = ffi_inc[0] + '/ffi.h'
-            fp = open(ffi_h)
-            while 1:
-                line = fp.readline()
-                if not line:
-                    ffi_inc = None
-                    break
-                if line.startswith('#define LIBFFI_H'):
-                    break
-        ffi_lib = None
-        if ffi_inc is not None:
-            for lib_name in ('ffi_convenience', 'ffi_pic', 'ffi'):
-                if (self.compiler.find_library_file(lib_dirs, lib_name)):
-                    ffi_lib = lib_name
-                    break
-
-        if ffi_inc and ffi_lib:
-            ext.include_dirs.extend(ffi_inc)
-            ext.libraries.append(ffi_lib)
-            self.use_system_libffi = True
-
-
-class PyBuildInstall(install):
-    # Suppress the warning about installation into the lib_dynload
-    # directory, which is not in sys.path when running Python during
-    # installation:
-    def initialize_options (self):
-        install.initialize_options(self)
-        self.warn_dir=0
-
-class PyBuildInstallLib(install_lib):
-    # Do exactly what install_lib does, but make sure the correct access
-    # modes get set on installed directories and files. All installed files
-    # will get mode 644 unless they are a shared library, in which case they
-    # will get mode 755. All installed directories will get mode 755.
-
-    so_ext = sysconfig.get_config_var("SO")
-
-    def install(self):
-        outfiles = install_lib.install(self)
-        self.set_file_modes(outfiles, 0644, 0755)
-        self.set_dir_modes(self.install_dir, 0755)
-        return outfiles
-
-    def set_file_modes(self, files, defaultMode, sharedLibMode):
-        if not self.is_chmod_supported(): return
-        if not files: return
-
-        for filename in files:
-            if os.path.islink(filename): continue
-            mode = defaultMode
-            if filename.endswith(self.so_ext): mode = sharedLibMode
-            log.info("changing mode of %s to %o", filename, mode)
-            if not self.dry_run: os.chmod(filename, mode)
-
-    def set_dir_modes(self, dirname, mode):
-        if not self.is_chmod_supported(): return
-        os.path.walk(dirname, self.set_dir_modes_visitor, mode)
-
-    def set_dir_modes_visitor(self, mode, dirname, names):
-        if os.path.islink(dirname): return
-        log.info("changing mode of %s to %o", dirname, mode)
-        if not self.dry_run: os.chmod(dirname, mode)
-
-    def is_chmod_supported(self):
-        return hasattr(os, 'chmod')
-
-SUMMARY = """
-Python is an interpreted, interactive, object-oriented programming
-language. It is often compared to Tcl, Perl, Scheme or Java.
-
-Python combines remarkable power with very clear syntax. It has
-modules, classes, exceptions, very high level dynamic data types, and
-dynamic typing. There are interfaces to many system calls and
-libraries, as well as to various windowing systems (X11, Motif, Tk,
-Mac, MFC). New built-in modules are easily written in C or C++. Python
-is also usable as an extension language for applications that need a
-programmable interface.
-
-The Python implementation is portable: it runs on many brands of UNIX,
-on Windows, DOS, OS/2, Mac, Amiga... If your favorite system isn't
-listed here, it may still be supported if there's a C compiler for
-it. Ask around on comp.lang.python -- or just try compiling Python
-yourself.
-"""
-
-CLASSIFIERS = """
-Development Status :: 6 - Mature
-License :: OSI Approved :: Python Software Foundation License
-Natural Language :: English
-Programming Language :: C
-Programming Language :: Python
-Topic :: Software Development
-"""
-
-def main():
-    # turn off warnings when deprecated modules are imported
-    import warnings
-    warnings.filterwarnings("ignore",category=DeprecationWarning)
-    setup(# PyPI Metadata (PEP 301)
-          name = "Python",
-          version = sys.version.split()[0],
-          url = "http://www.python.org/%s" % sys.version[:3],
-          maintainer = "Guido van Rossum and the Python community",
-          maintainer_email = "python-dev@python.org",
-          description = "A high-level object-oriented programming language",
-          long_description = SUMMARY.strip(),
-          license = "PSF license",
-          classifiers = filter(None, CLASSIFIERS.split("\n")),
-          platforms = ["Many"],
-
-          # Build info
-          cmdclass = {'build_ext':PyBuildExt, 'install':PyBuildInstall,
-                      'install_lib':PyBuildInstallLib},
-          # The struct module is defined here, because build_ext won't be
-          # called unless there's at least one extension module defined.
-          ext_modules=[Extension('_struct', ['_struct.c'])],
-
-          # Scripts to install
-          scripts = ['Tools/scripts/pydoc', 'Tools/scripts/idle',
-                     'Tools/scripts/2to3',
-                     'Lib/smtpd.py']
-        )
-
-# --install-platlib
-if __name__ == '__main__':
-    main()
diff --git a/pysrc/src/setup.py b/pysrc/src/setup.py
deleted file mode 100644
index 9a92bc3a7925bd990fd46d6b10868ebaad309e8e..0000000000000000000000000000000000000000
--- a/pysrc/src/setup.py
+++ /dev/null
@@ -1,2244 +0,0 @@
-# Autodetecting setup.py script for building the Python extensions
-#
-
-__version__ = "$Revision$"
-
-import sys, os, imp, re, optparse
-from glob import glob
-from platform import machine as platform_machine
-import sysconfig
-
-from distutils import log
-from distutils import text_file
-from distutils.errors import *
-from distutils.core import Extension, setup
-from distutils.command.build_ext import build_ext
-from distutils.command.install import install
-from distutils.command.install_lib import install_lib
-from distutils.spawn import find_executable
-
-cross_compiling = "_PYTHON_HOST_PLATFORM" in os.environ
-
-def get_platform():
-    # cross build
-    if "_PYTHON_HOST_PLATFORM" in os.environ:
-        return os.environ["_PYTHON_HOST_PLATFORM"]
-    # Get value of sys.platform
-    if sys.platform.startswith('osf1'):
-        return 'osf1'
-    return sys.platform
-host_platform = get_platform()
-
-# Were we compiled --with-pydebug or with #define Py_DEBUG?
-COMPILED_WITH_PYDEBUG = ('--with-pydebug' in sysconfig.get_config_var("CONFIG_ARGS"))
-
-# This global variable is used to hold the list of modules to be disabled.
-disabled_module_list = []
-
-def add_dir_to_list(dirlist, dir):
-    """Add the directory 'dir' to the list 'dirlist' (at the front) if
-    1) 'dir' is not already in 'dirlist'
-    2) 'dir' actually exists, and is a directory."""
-    if dir is not None and os.path.isdir(dir) and dir not in dirlist:
-        dirlist.insert(0, dir)
-
-def macosx_sdk_root():
-    """
-    Return the directory of the current OSX SDK,
-    or '/' if no SDK was specified.
-    """
-    cflags = sysconfig.get_config_var('CFLAGS')
-    m = re.search(r'-isysroot\s+(\S+)', cflags)
-    if m is None:
-        sysroot = '/'
-    else:
-        sysroot = m.group(1)
-    return sysroot
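-# For example, a CFLAGS of '-isysroot /Developer/SDKs/MacOSX10.6.sdk -g'
-# makes macosx_sdk_root() return '/Developer/SDKs/MacOSX10.6.sdk', while a
-# CFLAGS without -isysroot yields '/'.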
-
-def is_macosx_sdk_path(path):
-    """
-    Returns True if 'path' can be located in an OSX SDK
-    """
-    return ( (path.startswith('/usr/') and not path.startswith('/usr/local'))
-                or path.startswith('/System/')
-                or path.startswith('/Library/') )
-
-def find_file(filename, std_dirs, paths):
-    """Searches for the directory where a given file is located,
-    and returns a possibly-empty list of additional directories, or None
-    if the file couldn't be found at all.
-
-    'filename' is the name of a file, such as readline.h or libcrypto.a.
-    'std_dirs' is the list of standard system directories; if the
-        file is found in one of them, no additional directories are needed.
-    'paths' is a list of additional locations to check; if the file is
-        found in one of them, the resulting list will contain the directory.
-    """
-    if host_platform == 'darwin':
-        # Honor the MacOSX SDK setting when one was specified.
-        # An SDK is a directory with the same structure as a real
-        # system, but with only header files and libraries.
-        sysroot = macosx_sdk_root()
-
-    # Check the standard locations
-    for dir in std_dirs:
-        f = os.path.join(dir, filename)
-
-        if host_platform == 'darwin' and is_macosx_sdk_path(dir):
-            f = os.path.join(sysroot, dir[1:], filename)
-
-        if os.path.exists(f): return []
-
-    # Check the additional directories
-    for dir in paths:
-        f = os.path.join(dir, filename)
-
-        if host_platform == 'darwin' and is_macosx_sdk_path(dir):
-            f = os.path.join(sysroot, dir[1:], filename)
-
-        if os.path.exists(f):
-            return [dir]
-
-    # Not found anywhere
-    return None
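-# For example, find_file('zlib.h', ['/usr/include'], ['/opt/zlib/include'])
-# returns [] if /usr/include/zlib.h exists (no extra -I flag is needed),
-# ['/opt/zlib/include'] if the header is only found there, and None if the
-# header cannot be found anywhere.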
-
-def find_library_file(compiler, libname, std_dirs, paths):
-    result = compiler.find_library_file(std_dirs + paths, libname)
-    if result is None:
-        return None
-
-    if host_platform == 'darwin':
-        sysroot = macosx_sdk_root()
-
-    # Check whether the found file is in one of the standard directories
-    dirname = os.path.dirname(result)
-    for p in std_dirs:
-        # Ensure path doesn't end with path separator
-        p = p.rstrip(os.sep)
-
-        if host_platform == 'darwin' and is_macosx_sdk_path(p):
-            if os.path.join(sysroot, p[1:]) == dirname:
-                return [ ]
-
-        if p == dirname:
-            return [ ]
-
-    # Otherwise, it must have been in one of the additional directories,
-    # so we have to figure out which one.
-    for p in paths:
-        # Ensure path doesn't end with path separator
-        p = p.rstrip(os.sep)
-
-        if host_platform == 'darwin' and is_macosx_sdk_path(p):
-            if os.path.join(sysroot, p[1:]) == dirname:
-                return [ p ]
-
-        if p == dirname:
-            return [p]
-    else:
-        assert False, "Internal error: Path not found in std_dirs or paths"
-
-def module_enabled(extlist, modname):
-    """Returns whether the module 'modname' is present in the list
-    of extensions 'extlist'."""
-    extlist = [ext for ext in extlist if ext.name == modname]
-    return len(extlist)
-
-def find_module_file(module, dirlist):
-    """Find a module in a set of possible folders. If it is not found
-    return the unadorned filename"""
-    list = find_file(module, [], dirlist)
-    if not list:
-        return module
-    if len(list) > 1:
-        log.info("WARNING: multiple copies of %s found"%module)
-    return os.path.join(list[0], module)
-
-class PyBuildExt(build_ext):
-
-    def __init__(self, dist):
-        build_ext.__init__(self, dist)
-        self.failed = []
-
-    def build_extensions(self):
-
-        # Detect which modules should be compiled
-        missing = self.detect_modules()
-
-        # Remove modules that are present on the disabled list
-        extensions = [ext for ext in self.extensions
-                      if ext.name not in disabled_module_list]
-        # move ctypes to the end, it depends on other modules
-        ext_map = dict((ext.name, i) for i, ext in enumerate(extensions))
-        if "_ctypes" in ext_map:
-            ctypes = extensions.pop(ext_map["_ctypes"])
-            extensions.append(ctypes)
-        self.extensions = extensions
-
-        # Fix up the autodetected modules, prefixing all the source files
-        # with Modules/ and adding Python's include directory to the path.
-        (srcdir,) = sysconfig.get_config_vars('srcdir')
-        if not srcdir:
-            # Maybe running on Windows but not using CYGWIN?
-            raise ValueError("No source directory; cannot proceed.")
-        srcdir = os.path.abspath(srcdir)
-        moddirlist = [os.path.join(srcdir, 'Modules')]
-
-        # Platform-dependent module source and include directories
-        incdirlist = []
-
-        if host_platform == 'darwin' and ("--disable-toolbox-glue" not in
-            sysconfig.get_config_var("CONFIG_ARGS")):
-            # Mac OS X also includes some mac-specific modules
-            macmoddir = os.path.join(srcdir, 'Mac/Modules')
-            moddirlist.append(macmoddir)
-            incdirlist.append(os.path.join(srcdir, 'Mac/Include'))
-
-        # Fix up the paths for scripts, too
-        self.distribution.scripts = [os.path.join(srcdir, filename)
-                                     for filename in self.distribution.scripts]
-
-        # Python header files
-        headers = [sysconfig.get_config_h_filename()]
-        headers += glob(os.path.join(sysconfig.get_path('include'), "*.h"))
-        for ext in self.extensions[:]:
-            ext.sources = [ find_module_file(filename, moddirlist)
-                            for filename in ext.sources ]
-            if ext.depends is not None:
-                ext.depends = [find_module_file(filename, moddirlist)
-                               for filename in ext.depends]
-            else:
-                ext.depends = []
-            # re-compile extensions if a header file has been changed
-            ext.depends.extend(headers)
-
-            # platform specific include directories
-            ext.include_dirs.extend(incdirlist)
-
-            # If a module has already been built statically,
-            # don't build it here
-            if ext.name in sys.builtin_module_names:
-                self.extensions.remove(ext)
-
-        # Parse Modules/Setup and Modules/Setup.local to figure out which
-        # modules are already configured there, so they can be removed
-        # from the list of modules to auto-detect.
-        remove_modules = []
-        for filename in ('Modules/Setup', 'Modules/Setup.local'):
-            setup_file = text_file.TextFile(filename, join_lines=1)
-            while 1:
-                line = setup_file.readline()
-                if not line: break
-                line = line.split()
-                remove_modules.append(line[0])
-            setup_file.close()
-
-        for ext in self.extensions[:]:
-            if ext.name in remove_modules:
-                self.extensions.remove(ext)
-
-        # When you run "make CC=altcc" or something similar, you really want
-        # those environment variables passed into the setup.py phase.  Here's
-        # a small set of useful ones.
-        compiler = os.environ.get('CC')
-        args = {}
-        # Unfortunately, distutils doesn't let us provide separate C and C++
-        # compilers.
-        if compiler is not None:
-            (ccshared,cflags) = sysconfig.get_config_vars('CCSHARED','CFLAGS')
-            args['compiler_so'] = compiler + ' ' + ccshared + ' ' + cflags
-        self.compiler.set_executables(**args)
-
-        build_ext.build_extensions(self)
-
-        longest = max([len(e.name) for e in self.extensions])
-        if self.failed:
-            longest = max(longest, max([len(name) for name in self.failed]))
-
-        def print_three_column(lst):
-            lst.sort(key=str.lower)
-            # guarantee zip() doesn't drop anything
-            while len(lst) % 3:
-                lst.append("")
-            for e, f, g in zip(lst[::3], lst[1::3], lst[2::3]):
-                print "%-*s   %-*s   %-*s" % (longest, e, longest, f,
-                                              longest, g)
-
-        if missing:
-            print
-            print ("Python build finished, but the necessary bits to build "
-                   "these modules were not found:")
-            print_three_column(missing)
-            print ("To find the necessary bits, look in setup.py in"
-                   " detect_modules() for the module's name.")
-            print
-
-        if self.failed:
-            failed = self.failed[:]
-            print
-            print "Failed to build these modules:"
-            print_three_column(failed)
-            print
-
-    def build_extension(self, ext):
-
-        if ext.name == '_ctypes':
-            if not self.configure_ctypes(ext):
-                return
-
-        try:
-            build_ext.build_extension(self, ext)
-        except (CCompilerError, DistutilsError), why:
-            self.announce('WARNING: building of extension "%s" failed: %s' %
-                          (ext.name, sys.exc_info()[1]))
-            self.failed.append(ext.name)
-            return
-        # Workaround for Mac OS X: The Carbon-based modules cannot be
-        # reliably imported into a command-line Python
-        if 'Carbon' in ext.extra_link_args:
-            self.announce(
-                'WARNING: skipping import check for Carbon-based "%s"' %
-                ext.name)
-            return
-
-        if host_platform == 'darwin' and (
-                sys.maxint > 2**32 and '-arch' in ext.extra_link_args):
-            # Don't bother doing an import check when an extension was
-            # built with an explicit '-arch' flag on OSX. That's currently
-            # only used to build 32-bit only extensions in a 4-way
-            # universal build and loading 32-bit code into a 64-bit
-            # process will fail.
-            self.announce(
-                'WARNING: skipping import check for "%s"' %
-                ext.name)
-            return
-
-        # Workaround for Cygwin: Cygwin currently has fork issues when many
-        # modules have been imported
-        if host_platform == 'cygwin':
-            self.announce('WARNING: skipping import check for Cygwin-based "%s"'
-                % ext.name)
-            return
-        ext_filename = os.path.join(
-            self.build_lib,
-            self.get_ext_filename(self.get_ext_fullname(ext.name)))
-
-        # Don't try to load extensions for cross builds
-        if cross_compiling:
-            return
-
-        try:
-            imp.load_dynamic(ext.name, ext_filename)
-        except ImportError, why:
-            self.failed.append(ext.name)
-            self.announce('*** WARNING: renaming "%s" since importing it'
-                          ' failed: %s' % (ext.name, why), level=3)
-            assert not self.inplace
-            basename, tail = os.path.splitext(ext_filename)
-            newname = basename + "_failed" + tail
-            if os.path.exists(newname):
-                os.remove(newname)
-            os.rename(ext_filename, newname)
-
-            # XXX -- This relies on a Vile HACK in
-            # distutils.command.build_ext.build_extension().  The
-            # _built_objects attribute is stored there strictly for
-            # use here.
-            # If there is a failure, _built_objects may not be there,
-            # so catch the AttributeError and move on.
-            try:
-                for filename in self._built_objects:
-                    os.remove(filename)
-            except AttributeError:
-                self.announce('unable to remove files (ignored)')
-        except:
-            exc_type, why, tb = sys.exc_info()
-            self.announce('*** WARNING: importing extension "%s" '
-                          'failed with %s: %s' % (ext.name, exc_type, why),
-                          level=3)
-            self.failed.append(ext.name)
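-        # e.g. a _sqlite3.so whose test import fails is renamed to
-        # _sqlite3_failed.so above, so the broken module cannot be picked
-        # up by a later 'import _sqlite3'.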
-
-    def add_multiarch_paths(self):
-        # Debian/Ubuntu multiarch support.
-        # https://wiki.ubuntu.com/MultiarchSpec
-        cc = sysconfig.get_config_var('CC')
-        tmpfile = os.path.join(self.build_temp, 'multiarch')
-        if not os.path.exists(self.build_temp):
-            os.makedirs(self.build_temp)
-        ret = os.system(
-            '%s -print-multiarch > %s 2> /dev/null' % (cc, tmpfile))
-        multiarch_path_component = ''
-        try:
-            if ret >> 8 == 0:
-                with open(tmpfile) as fp:
-                    multiarch_path_component = fp.readline().strip()
-        finally:
-            os.unlink(tmpfile)
-
-        if multiarch_path_component != '':
-            add_dir_to_list(self.compiler.library_dirs,
-                            '/usr/lib/' + multiarch_path_component)
-            add_dir_to_list(self.compiler.include_dirs,
-                            '/usr/include/' + multiarch_path_component)
-            return
-
-        if not find_executable('dpkg-architecture'):
-            return
-        opt = ''
-        if cross_compiling:
-            opt = '-t' + sysconfig.get_config_var('HOST_GNU_TYPE')
-        tmpfile = os.path.join(self.build_temp, 'multiarch')
-        if not os.path.exists(self.build_temp):
-            os.makedirs(self.build_temp)
-        ret = os.system(
-            'dpkg-architecture %s -qDEB_HOST_MULTIARCH > %s 2> /dev/null' %
-            (opt, tmpfile))
-        try:
-            if ret >> 8 == 0:
-                with open(tmpfile) as fp:
-                    multiarch_path_component = fp.readline().strip()
-                add_dir_to_list(self.compiler.library_dirs,
-                                '/usr/lib/' + multiarch_path_component)
-                add_dir_to_list(self.compiler.include_dirs,
-                                '/usr/include/' + multiarch_path_component)
-        finally:
-            os.unlink(tmpfile)
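A compact sketch of the same multiarch probe using `subprocess` instead of `os.system` plus a temp file; it assumes `gcc` (or Debian's `dpkg-architecture`) is on PATH:

```python
import subprocess

def multiarch_triplet(cc='gcc'):
    # Try the compiler first, then the dpkg-architecture fallback,
    # as add_multiarch_paths above does.
    for cmd in ([cc, '-print-multiarch'],
                ['dpkg-architecture', '-qDEB_HOST_MULTIARCH']):
        try:
            out = subprocess.check_output(cmd, stderr=subprocess.DEVNULL)
        except (OSError, subprocess.CalledProcessError):
            continue
        triplet = out.decode().strip()
        if triplet:
            return triplet   # e.g. 'x86_64-linux-gnu'
    return ''
```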
-
-    def add_gcc_paths(self):
-        gcc = sysconfig.get_config_var('CC')
-        tmpfile = os.path.join(self.build_temp, 'gccpaths')
-        if not os.path.exists(self.build_temp):
-            os.makedirs(self.build_temp)
-        ret = os.system('%s -E -v - </dev/null 2>%s 1>/dev/null' % (gcc, tmpfile))
-        is_gcc = False
-        in_incdirs = False
-        inc_dirs = []
-        lib_dirs = []
-        try:
-            if ret >> 8 == 0:
-                with open(tmpfile) as fp:
-                    for line in fp.readlines():
-                        if line.startswith("gcc version"):
-                            is_gcc = True
-                        elif line.startswith("#include <...>"):
-                            in_incdirs = True
-                        elif line.startswith("End of search list"):
-                            in_incdirs = False
-                        elif is_gcc and line.startswith("LIBRARY_PATH"):
-                            for d in line.strip().split("=")[1].split(":"):
-                                d = os.path.normpath(d)
-                                if '/gcc/' not in d:
-                                    add_dir_to_list(self.compiler.library_dirs,
-                                                    d)
-                        elif is_gcc and in_incdirs and '/gcc/' not in line:
-                            add_dir_to_list(self.compiler.include_dirs,
-                                            line.strip())
-        finally:
-            os.unlink(tmpfile)
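The banner parsing above is essentially a small state machine over gcc's stderr; a standalone sketch (Python 3, `gcc` on PATH assumed):

```python
import subprocess

def gcc_include_dirs(cc='gcc'):
    # 'gcc -E -v -' prints its built-in include search list on stderr,
    # bracketed by '#include <...>' and 'End of search list'.
    proc = subprocess.run([cc, '-E', '-v', '-'],
                          stdin=subprocess.DEVNULL,
                          stdout=subprocess.DEVNULL,
                          stderr=subprocess.PIPE, text=True)
    dirs, collecting = [], False
    for line in proc.stderr.splitlines():
        if line.startswith('#include <...>'):
            collecting = True
        elif line.startswith('End of search list'):
            collecting = False
        elif collecting:
            dirs.append(line.strip())
    return dirs
```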
-
-    def detect_modules(self):
-        # PCMDI Change
-        # Ensure that the place we put tcl/tk/netcdf etc. is always used
-        libbase = os.environ.get('EXTERNALS', os.path.join(sys.prefix,'..','Externals'))
-        mylibdir = os.path.join(libbase,'lib')
-        myincdir = os.path.join(libbase,'include')
-        add_dir_to_list(self.compiler.library_dirs, mylibdir)
-        add_dir_to_list(self.compiler.include_dirs, myincdir)
-        # End PCMDI Changes
-        # Ensure that /usr/local is always used
-        if not cross_compiling:
-            add_dir_to_list(self.compiler.library_dirs, '/usr/local/lib')
-            add_dir_to_list(self.compiler.include_dirs, '/usr/local/include')
-        if cross_compiling:
-            self.add_gcc_paths()
-        self.add_multiarch_paths()
-
-        # Add paths specified in the environment variables LDFLAGS and
-        # CPPFLAGS for header and library files.
-        # We must get the values from the Makefile and not the environment
-        # directly since an inconsistently reproducible issue comes up where
-        # the environment variable is not set even though the values were passed
-        # into configure and stored in the Makefile (issue found on OS X 10.3).
-        for env_var, arg_name, dir_list in (
-                ('LDFLAGS', '-R', self.compiler.runtime_library_dirs),
-                ('LDFLAGS', '-L', self.compiler.library_dirs),
-                ('CPPFLAGS', '-I', self.compiler.include_dirs)):
-            env_val = sysconfig.get_config_var(env_var)
-            if env_val:
-                # To prevent optparse from raising an exception about any
-                # options in env_val that it doesn't know about we strip out
-                # all double dashes and any dashes followed by a character
-                # that is not for the option we are dealing with.
-                #
-                # Please note that order of the regex is important!  We must
-                # strip out double-dashes first so that we don't end up with
-                # substituting "--Long" to "-Long" and thus lead to "ong" being
-                # used for a library directory.
-                env_val = re.sub(r'(^|\s+)-(-|(?!%s))' % arg_name[1],
-                                 ' ', env_val)
-                parser = optparse.OptionParser()
-                # Make sure that allowing args interspersed with options is
-                # allowed
-                parser.allow_interspersed_args = True
-                parser.error = lambda msg: None
-                parser.add_option(arg_name, dest="dirs", action="append")
-                options = parser.parse_args(env_val.split())[0]
-                if options.dirs:
-                    for directory in reversed(options.dirs):
-                        add_dir_to_list(dir_list, directory)
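Isolated from the loop above, the optparse trick looks like this; a sketch with a made-up flags string:

```python
import optparse
import re

def dirs_from_flags(flags, arg_name):
    # Strip '--long' options and any dash not starting arg_name, so
    # optparse only ever sees the one option we registered.
    flags = re.sub(r'(^|\s+)-(-|(?!%s))' % arg_name[1], ' ', flags)
    parser = optparse.OptionParser()
    parser.allow_interspersed_args = True
    parser.error = lambda msg: None   # never exit on a stray token
    parser.add_option(arg_name, dest='dirs', action='append')
    return parser.parse_args(flags.split())[0].dirs or []

print(dirs_from_flags('-L/opt/ssl/lib -O2 -L/usr/local/lib', '-L'))
# -> ['/opt/ssl/lib', '/usr/local/lib'] (the caller adds them reversed)
```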
-
-        if os.path.normpath(sys.prefix) != '/usr' \
-                and not sysconfig.get_config_var('PYTHONFRAMEWORK'):
-            # OSX note: Don't add LIBDIR and INCLUDEDIR to building a framework
-            # (PYTHONFRAMEWORK is set) to avoid linking problems when
-            # building a framework with different architectures than
-            # the one that is currently installed (issue #7473)
-            add_dir_to_list(self.compiler.library_dirs,
-                            sysconfig.get_config_var("LIBDIR"))
-            add_dir_to_list(self.compiler.include_dirs,
-                            sysconfig.get_config_var("INCLUDEDIR"))
-
-        try:
-            have_unicode = unicode
-        except NameError:
-            have_unicode = 0
-
-        # lib_dirs and inc_dirs are used to search for files;
-        # if a file is found in one of those directories, it can
-        # be assumed that no additional -I,-L directives are needed.
-        inc_dirs = self.compiler.include_dirs[:]
-        lib_dirs = self.compiler.library_dirs[:]
-        if not cross_compiling:
-            for d in (
-                '/usr/include',
-                ):
-                add_dir_to_list(inc_dirs, d)
-            for d in (
-                '/lib64', '/usr/lib64',
-                '/lib', '/usr/lib',
-                ):
-                add_dir_to_list(lib_dirs, d)
-        exts = []
-        missing = []
-
-        config_h = sysconfig.get_config_h_filename()
-        config_h_vars = sysconfig.parse_config_h(open(config_h))
-
-        srcdir = sysconfig.get_config_var('srcdir')
-
-        # Check for AtheOS which has libraries in non-standard locations
-        if host_platform == 'atheos':
-            lib_dirs += ['/system/libs', '/atheos/autolnk/lib']
-            lib_dirs += os.getenv('LIBRARY_PATH', '').split(os.pathsep)
-            inc_dirs += ['/system/include', '/atheos/autolnk/include']
-            inc_dirs += os.getenv('C_INCLUDE_PATH', '').split(os.pathsep)
-
-        # OSF/1 and Unixware have some stuff in /usr/ccs/lib (like -ldb)
-        if host_platform in ['osf1', 'unixware7', 'openunix8']:
-            lib_dirs += ['/usr/ccs/lib']
-
-        # HP-UX11iv3 keeps files in lib/hpux folders.
-        if host_platform == 'hp-ux11':
-            lib_dirs += ['/usr/lib/hpux64', '/usr/lib/hpux32']
-
-        if host_platform == 'darwin':
-            # This should work on any unixy platform ;-)
-            # If the user has bothered specifying additional -I and -L flags
-            # in OPT and LDFLAGS we might as well use them here.
-            #   NOTE: using shlex.split would technically be more correct, but
-            # also gives a bootstrap problem. Let's hope nobody uses directories
-            # with whitespace in the name to store libraries.
-            cflags, ldflags = sysconfig.get_config_vars(
-                    'CFLAGS', 'LDFLAGS')
-            for item in cflags.split():
-                if item.startswith('-I'):
-                    inc_dirs.append(item[2:])
-
-            for item in ldflags.split():
-                if item.startswith('-L'):
-                    lib_dirs.append(item[2:])
-
-        # Check for MacOS X, which doesn't need libm.a at all
-        math_libs = ['m']
-        if host_platform in ['darwin', 'beos']:
-            math_libs = []
-
-        # XXX Omitted modules: gl, pure, dl, SGI-specific modules
-
-        #
-        # The following modules are all pretty straightforward, and compile
-        # on pretty much any POSIXish platform.
-        #
-
-        # Some modules that are normally always on:
-        #exts.append( Extension('_weakref', ['_weakref.c']) )
-
-        # array objects
-        exts.append( Extension('array', ['arraymodule.c']) )
-        # complex math library functions
-        exts.append( Extension('cmath', ['cmathmodule.c', '_math.c'],
-                               depends=['_math.h'],
-                               libraries=math_libs) )
-        # math library functions, e.g. sin()
-        exts.append( Extension('math',  ['mathmodule.c', '_math.c'],
-                               depends=['_math.h'],
-                               libraries=math_libs) )
-        # fast string operations implemented in C
-        exts.append( Extension('strop', ['stropmodule.c']) )
-        # time operations and variables
-        exts.append( Extension('time', ['timemodule.c'],
-                               libraries=math_libs) )
-        exts.append( Extension('datetime', ['datetimemodule.c', 'timemodule.c'],
-                               libraries=math_libs) )
-        # fast iterator tools implemented in C
-        exts.append( Extension("itertools", ["itertoolsmodule.c"]) )
-        # code that will be builtins in the future, but conflict with the
-        #  current builtins
-        exts.append( Extension('future_builtins', ['future_builtins.c']) )
-        # random number generator implemented in C
-        exts.append( Extension("_random", ["_randommodule.c"]) )
-        # high-performance collections
-        exts.append( Extension("_collections", ["_collectionsmodule.c"]) )
-        # bisect
-        exts.append( Extension("_bisect", ["_bisectmodule.c"]) )
-        # heapq
-        exts.append( Extension("_heapq", ["_heapqmodule.c"]) )
-        # operator.add() and similar goodies
-        exts.append( Extension('operator', ['operator.c']) )
-        # Python 3.1 _io library
-        exts.append( Extension("_io",
-            ["_io/bufferedio.c", "_io/bytesio.c", "_io/fileio.c",
-             "_io/iobase.c", "_io/_iomodule.c", "_io/stringio.c", "_io/textio.c"],
-             depends=["_io/_iomodule.h"], include_dirs=["Modules/_io"]))
-        # _functools
-        exts.append( Extension("_functools", ["_functoolsmodule.c"]) )
-        # _json speedups
-        exts.append( Extension("_json", ["_json.c"]) )
-        # Python C API test module
-        exts.append( Extension('_testcapi', ['_testcapimodule.c'],
-                               depends=['testcapi_long.h']) )
-        # profilers (_lsprof is for cProfile.py)
-        exts.append( Extension('_hotshot', ['_hotshot.c']) )
-        exts.append( Extension('_lsprof', ['_lsprof.c', 'rotatingtree.c']) )
-        # static Unicode character database
-        if have_unicode:
-            exts.append( Extension('unicodedata', ['unicodedata.c']) )
-        else:
-            missing.append('unicodedata')
-        # access to ISO C locale support
-        data = open('pyconfig.h').read()
-        m = re.search(r"#s*define\s+WITH_LIBINTL\s+1\s*", data)
-        if m is not None:
-            locale_libs = ['intl']
-        else:
-            locale_libs = []
-        if host_platform == 'darwin':
-            locale_extra_link_args = ['-framework', 'CoreFoundation']
-        else:
-            locale_extra_link_args = []
-
-
-        exts.append( Extension('_locale', ['_localemodule.c'],
-                               libraries=locale_libs,
-                               extra_link_args=locale_extra_link_args) )
-
-        # Modules with some UNIX dependencies -- on by default:
-        # (If you have a really backward UNIX, select and socket may not be
-        # supported...)
-
-        # fcntl(2) and ioctl(2)
-        libs = []
-        if (config_h_vars.get('FLOCK_NEEDS_LIBBSD', False)):
-            # May be necessary on AIX for flock function
-            libs = ['bsd']
-        exts.append( Extension('fcntl', ['fcntlmodule.c'], libraries=libs) )
-        # pwd(3)
-        exts.append( Extension('pwd', ['pwdmodule.c']) )
-        # grp(3)
-        exts.append( Extension('grp', ['grpmodule.c']) )
-        # spwd, shadow passwords
-        if (config_h_vars.get('HAVE_GETSPNAM', False) or
-                config_h_vars.get('HAVE_GETSPENT', False)):
-            exts.append( Extension('spwd', ['spwdmodule.c']) )
-        else:
-            missing.append('spwd')
-
-        # select(2); not on ancient System V
-        exts.append( Extension('select', ['selectmodule.c']) )
-
-        # Fred Drake's interface to the Python parser
-        exts.append( Extension('parser', ['parsermodule.c']) )
-
-        # cStringIO and cPickle
-        exts.append( Extension('cStringIO', ['cStringIO.c']) )
-        exts.append( Extension('cPickle', ['cPickle.c']) )
-
-        # Memory-mapped files (also works on Win32).
-        if host_platform not in ['atheos']:
-            exts.append( Extension('mmap', ['mmapmodule.c']) )
-        else:
-            missing.append('mmap')
-
-        # Lance Ellinghaus's syslog module
-        # syslog daemon interface
-        exts.append( Extension('syslog', ['syslogmodule.c']) )
-
-        # George Neville-Neil's timing module:
-        # Deprecated in PEP 4 http://www.python.org/peps/pep-0004.html
-        # http://mail.python.org/pipermail/python-dev/2006-January/060023.html
-        #exts.append( Extension('timing', ['timingmodule.c']) )
-
-        #
-        # Here ends the simple stuff.  From here on, modules need certain
-        # libraries, are platform-specific, or present other surprises.
-        #
-
-        # Multimedia modules
-        # These don't work for 64-bit platforms!!!
-        # These represent audio samples or images as strings:
-
-        # Operations on audio samples
-        # According to #993173, this one should actually work fine on
-        # 64-bit platforms.
-        exts.append( Extension('audioop', ['audioop.c']) )
-
-        # Disabled on 64-bit platforms
-        if sys.maxint != 9223372036854775807L:
-            # Operations on images
-            exts.append( Extension('imageop', ['imageop.c']) )
-        else:
-            missing.extend(['imageop'])
-
-        # readline
-        do_readline = self.compiler.find_library_file(lib_dirs, 'readline')
-        readline_termcap_library = ""
-        curses_library = ""
-        # Determine if readline is already linked against curses or tinfo.
-        if do_readline and find_executable('ldd'):
-            fp = os.popen("ldd %s" % do_readline)
-            ldd_output = fp.readlines()
-            ret = fp.close()
-            if ret is None or ret >> 8 == 0:
-                for ln in ldd_output:
-                    if 'curses' in ln:
-                        readline_termcap_library = re.sub(
-                            r'.*lib(n?cursesw?)\.so.*', r'\1', ln
-                        ).rstrip()
-                        break
-                    if 'tinfo' in ln: # termcap interface split out from ncurses
-                        readline_termcap_library = 'tinfo'
-                        break
-        # Issue 7384: If readline is already linked against curses,
-        # use the same library for the readline and curses modules.
-        if 'curses' in readline_termcap_library:
-            curses_library = readline_termcap_library
-        elif self.compiler.find_library_file(lib_dirs, 'ncursesw'):
-            curses_library = 'ncursesw'
-        elif self.compiler.find_library_file(lib_dirs, 'ncurses'):
-            curses_library = 'ncurses'
-        elif self.compiler.find_library_file(lib_dirs, 'curses'):
-            curses_library = 'curses'
-
-        if host_platform == 'darwin':
-            os_release = int(os.uname()[2].split('.')[0])
-            dep_target = sysconfig.get_config_var('MACOSX_DEPLOYMENT_TARGET')
-            if dep_target and dep_target.split('.') < ['10', '5']:
-                os_release = 8
-            if os_release < 9:
-                # MacOSX 10.4 has a broken readline. Don't try to build
-                # the readline module unless the user has installed a fixed
-                # readline package
-                if find_file('readline/rlconf.h', inc_dirs, []) is None:
-                    do_readline = False
-        if do_readline:
-            if host_platform == 'darwin' and os_release < 9:
-                # In every directory on the search path search for a dynamic
-                # library and then a static library, instead of first looking
-                # for dynamic libraries on the entire path.
-                # This way a statically linked custom readline gets picked up
-                # before the (possibly broken) dynamic library in /usr/lib.
-                readline_extra_link_args = ('-Wl,-search_paths_first',)
-            else:
-                readline_extra_link_args = ()
-
-            readline_libs = ['readline']
-            if readline_termcap_library:
-                pass # Issue 7384: Already linked against curses or tinfo.
-            elif curses_library:
-                readline_libs.append(curses_library)
-            elif self.compiler.find_library_file(lib_dirs +
-                                                     ['/usr/lib/termcap'],
-                                                     'termcap'):
-                readline_libs.append('termcap')
-            exts.append( Extension('readline', ['readline.c'],
-                                   library_dirs=['/usr/lib/termcap'],
-                                   extra_link_args=readline_extra_link_args,
-                                   libraries=readline_libs) )
-        else:
-            missing.append('readline')
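The ldd probe at the top of this block is reusable on its own; a sketch, assuming an `ldd` binary and a path to an existing libreadline:

```python
import re
import subprocess

def readline_termcap(libreadline_path):
    # Ask the dynamic linker which curses/tinfo flavour libreadline is
    # already linked against, as the detection above does.
    try:
        out = subprocess.check_output(['ldd', libreadline_path], text=True)
    except (OSError, subprocess.CalledProcessError):
        return ''
    for ln in out.splitlines():
        if 'curses' in ln:
            return re.sub(r'.*lib(n?cursesw?)\.so.*', r'\1', ln).strip()
        if 'tinfo' in ln:   # termcap interface split out from ncurses
            return 'tinfo'
    return ''
```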
-
-        # crypt module.
-
-        if self.compiler.find_library_file(lib_dirs, 'crypt'):
-            libs = ['crypt']
-        else:
-            libs = []
-        exts.append( Extension('crypt', ['cryptmodule.c'], libraries=libs) )
-
-        # CSV files
-        exts.append( Extension('_csv', ['_csv.c']) )
-
-        # socket(2)
-        exts.append( Extension('_socket', ['socketmodule.c', 'timemodule.c'],
-                               depends=['socketmodule.h'],
-                               libraries=math_libs) )
-        # Detect SSL support for the socket module (via _ssl)
-        search_for_ssl_incs_in = [
-                              '/usr/local/ssl/include',
-                              '/usr/contrib/ssl/include/'
-                             ]
-        ssl_incs = find_file('openssl/ssl.h', inc_dirs,
-                             search_for_ssl_incs_in
-                             )
-        if ssl_incs is not None:
-            krb5_h = find_file('krb5.h', inc_dirs,
-                               ['/usr/kerberos/include'])
-            if krb5_h:
-                ssl_incs += krb5_h
-        ssl_libs = find_library_file(self.compiler, 'ssl',lib_dirs,
-                                     ['/usr/local/ssl/lib',
-                                      '/usr/contrib/ssl/lib/'
-                                     ] )
-
-        if (ssl_incs is not None and
-            ssl_libs is not None):
-            exts.append( Extension('_ssl', ['_ssl.c'],
-                                   include_dirs = ssl_incs,
-                                   library_dirs = ssl_libs,
-                                   libraries = ['ssl', 'crypto'],
-                                   depends = ['socketmodule.h']), )
-        else:
-            missing.append('_ssl')
-
-        # find out which version of OpenSSL we have
-        openssl_ver = 0
-        openssl_ver_re = re.compile(
-            '^\s*#\s*define\s+OPENSSL_VERSION_NUMBER\s+(0x[0-9a-fA-F]+)' )
-
-        # look for the openssl version header on the compiler search path.
-        opensslv_h = find_file('openssl/opensslv.h', [],
-                inc_dirs + search_for_ssl_incs_in)
-        if opensslv_h:
-            name = os.path.join(opensslv_h[0], 'openssl/opensslv.h')
-            if host_platform == 'darwin' and is_macosx_sdk_path(name):
-                name = os.path.join(macosx_sdk_root(), name[1:])
-            try:
-                incfile = open(name, 'r')
-                for line in incfile:
-                    m = openssl_ver_re.match(line)
-                    if m:
-                        openssl_ver = eval(m.group(1))
-            except IOError, msg:
-                print "IOError while reading opensshv.h:", msg
-                pass
-
-        min_openssl_ver = 0x00907000
-        have_any_openssl = ssl_incs is not None and ssl_libs is not None
-        have_usable_openssl = (have_any_openssl and
-                               openssl_ver >= min_openssl_ver)
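A standalone sketch of the opensslv.h sniffing above, using `int(..., 16)` rather than `eval` for the hex constant; the header path is an assumption:

```python
import re

def openssl_version(opensslv_h='/usr/include/openssl/opensslv.h'):
    ver_re = re.compile(
        r'^\s*#\s*define\s+OPENSSL_VERSION_NUMBER\s+(0x[0-9a-fA-F]+)')
    with open(opensslv_h) as fp:
        for line in fp:
            m = ver_re.match(line)
            if m:
                return int(m.group(1), 16)
    return 0

# 0x00907000 is the 0.9.7 floor that _hashlib requires above
print(openssl_version() >= 0x00907000)
```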
-
-        if have_any_openssl:
-            if have_usable_openssl:
-                # The _hashlib module wraps optimized implementations
-                # of hash functions from the OpenSSL library.
-                exts.append( Extension('_hashlib', ['_hashopenssl.c'],
-                                       include_dirs = ssl_incs,
-                                       library_dirs = ssl_libs,
-                                       libraries = ['ssl', 'crypto']) )
-            else:
-                print ("warning: openssl 0x%08x is too old for _hashlib" %
-                       openssl_ver)
-                missing.append('_hashlib')
-        if COMPILED_WITH_PYDEBUG or not have_usable_openssl:
-            # The _sha module implements the SHA1 hash algorithm.
-            exts.append( Extension('_sha', ['shamodule.c']) )
-            # The _md5 module implements the RSA Data Security, Inc. MD5
-            # Message-Digest Algorithm, described in RFC 1321.  The
-            # necessary files md5.c and md5.h are included here.
-            exts.append( Extension('_md5',
-                            sources = ['md5module.c', 'md5.c'],
-                            depends = ['md5.h']) )
-
-        min_sha2_openssl_ver = 0x00908000
-        if COMPILED_WITH_PYDEBUG or openssl_ver < min_sha2_openssl_ver:
-            # OpenSSL doesn't do these until 0.9.8 so we'll bring our own hash
-            exts.append( Extension('_sha256', ['sha256module.c']) )
-            exts.append( Extension('_sha512', ['sha512module.c']) )
-
-        # Modules that provide persistent dictionary-like semantics.  You will
-        # probably want to arrange for at least one of them to be available on
-        # your machine, though none are defined by default because of library
-        # dependencies.  The Python module anydbm.py provides an
-        # implementation independent wrapper for these; dumbdbm.py provides
-        # similar functionality (but slower of course) implemented in Python.
-
-        # Sleepycat^WOracle Berkeley DB interface.
-        #  http://www.oracle.com/database/berkeley-db/db/index.html
-        #
-        # This requires the Sleepycat^WOracle DB code. The supported versions
-        # are set below.  Visit the URL above to download
-        # a release.  Most open source OSes come with one or more
-        # versions of BerkeleyDB already installed.
-
-        max_db_ver = (5, 3)
-        min_db_ver = (4, 3)
-        db_setup_debug = False   # verbose debug prints from this script?
-
-        def allow_db_ver(db_ver):
-            """Returns a boolean if the given BerkeleyDB version is acceptable.
-
-            Args:
-              db_ver: A tuple of the version to verify.
-            """
-            if not (min_db_ver <= db_ver <= max_db_ver):
-                return False
-            # Use this function to filter out known bad configurations.
-            if (4, 6) == db_ver[:2]:
-                # BerkeleyDB 4.6.x is not stable on many architectures.
-                arch = platform_machine()
-                if arch not in ('i386', 'i486', 'i586', 'i686',
-                                'x86_64', 'ia64'):
-                    return False
-            return True
-
-        def gen_db_minor_ver_nums(major):
-            if major == 5:
-                for x in range(max_db_ver[1]+1):
-                    if allow_db_ver((5, x)):
-                        yield x
-            elif major == 4:
-                for x in range(9):
-                    if allow_db_ver((4, x)):
-                        yield x
-            elif major == 3:
-                for x in (3,):
-                    if allow_db_ver((3, x)):
-                        yield x
-            else:
-                raise ValueError("unknown major BerkeleyDB version", major)
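A tiny usage sketch of the two helpers just defined, assuming `gen_db_minor_ver_nums`, `allow_db_ver`, and setup.py's `platform_machine` are in scope:

```python
# Enumerate every BerkeleyDB version the header scan below may accept,
# newest major first; (4, 6) survives only on the whitelisted arches.
candidates = [(major, minor)
              for major in (5, 4, 3)
              for minor in gen_db_minor_ver_nums(major)]
print(candidates)
```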
-
-        # construct a list of paths to look for the header file in on
-        # top of the normal inc_dirs.
-        db_inc_paths = [
-            '/usr/include/db4',
-            '/usr/local/include/db4',
-            '/opt/sfw/include/db4',
-            '/usr/include/db3',
-            '/usr/local/include/db3',
-            '/opt/sfw/include/db3',
-            # Fink defaults (http://fink.sourceforge.net/)
-            '/sw/include/db4',
-            '/sw/include/db3',
-        ]
-        # 4.x minor number specific paths
-        for x in gen_db_minor_ver_nums(4):
-            db_inc_paths.append('/usr/include/db4%d' % x)
-            db_inc_paths.append('/usr/include/db4.%d' % x)
-            db_inc_paths.append('/usr/local/BerkeleyDB.4.%d/include' % x)
-            db_inc_paths.append('/usr/local/include/db4%d' % x)
-            db_inc_paths.append('/pkg/db-4.%d/include' % x)
-            db_inc_paths.append('/opt/db-4.%d/include' % x)
-            # MacPorts default (http://www.macports.org/)
-            db_inc_paths.append('/opt/local/include/db4%d' % x)
-        # 3.x minor number specific paths
-        for x in gen_db_minor_ver_nums(3):
-            db_inc_paths.append('/usr/include/db3%d' % x)
-            db_inc_paths.append('/usr/local/BerkeleyDB.3.%d/include' % x)
-            db_inc_paths.append('/usr/local/include/db3%d' % x)
-            db_inc_paths.append('/pkg/db-3.%d/include' % x)
-            db_inc_paths.append('/opt/db-3.%d/include' % x)
-
-        if cross_compiling:
-            db_inc_paths = []
-
-        # Add some common subdirectories for Sleepycat DB to the list,
-        # based on the standard include directories. This way DB3/4 gets
-        # picked up when it is installed in a non-standard prefix and
-        # the user has added that prefix into inc_dirs.
-        std_variants = []
-        for dn in inc_dirs:
-            std_variants.append(os.path.join(dn, 'db3'))
-            std_variants.append(os.path.join(dn, 'db4'))
-            for x in gen_db_minor_ver_nums(4):
-                std_variants.append(os.path.join(dn, "db4%d"%x))
-                std_variants.append(os.path.join(dn, "db4.%d"%x))
-            for x in gen_db_minor_ver_nums(3):
-                std_variants.append(os.path.join(dn, "db3%d"%x))
-                std_variants.append(os.path.join(dn, "db3.%d"%x))
-
-        db_inc_paths = std_variants + db_inc_paths
-        db_inc_paths = [p for p in db_inc_paths if os.path.exists(p)]
-
-        db_ver_inc_map = {}
-
-        if host_platform == 'darwin':
-            sysroot = macosx_sdk_root()
-
-        class db_found(Exception): pass
-        try:
-            # See whether there is a Sleepycat header in the standard
-            # search path.
-            for d in inc_dirs + db_inc_paths:
-                f = os.path.join(d, "db.h")
-
-                if host_platform == 'darwin' and is_macosx_sdk_path(d):
-                    f = os.path.join(sysroot, d[1:], "db.h")
-
-                if db_setup_debug: print "db: looking for db.h in", f
-                if os.path.exists(f):
-                    f = open(f).read()
-                    m = re.search(r"#define\WDB_VERSION_MAJOR\W(\d+)", f)
-                    if m:
-                        db_major = int(m.group(1))
-                        m = re.search(r"#define\WDB_VERSION_MINOR\W(\d+)", f)
-                        db_minor = int(m.group(1))
-                        db_ver = (db_major, db_minor)
-
-                        # Avoid 4.6 prior to 4.6.21 due to a BerkeleyDB bug
-                        if db_ver == (4, 6):
-                            m = re.search(r"#define\WDB_VERSION_PATCH\W(\d+)", f)
-                            db_patch = int(m.group(1))
-                            if db_patch < 21:
-                                print "db.h:", db_ver, "patch", db_patch,
-                                print "being ignored (4.6.x must be >= 4.6.21)"
-                                continue
-
-                        if ( (db_ver not in db_ver_inc_map) and
-                            allow_db_ver(db_ver) ):
-                            # save the include directory with the db.h version
-                            # (first occurrence only)
-                            db_ver_inc_map[db_ver] = d
-                            if db_setup_debug:
-                                print "db.h: found", db_ver, "in", d
-                        else:
-                            # we already found a header for this library version
-                            if db_setup_debug: print "db.h: ignoring", d
-                    else:
-                        # ignore this header, it didn't contain a version number
-                        if db_setup_debug:
-                            print "db.h: no version number version in", d
-
-            db_found_vers = db_ver_inc_map.keys()
-            db_found_vers.sort()
-
-            while db_found_vers:
-                db_ver = db_found_vers.pop()
-                db_incdir = db_ver_inc_map[db_ver]
-
-                # check lib directories parallel to the location of the header
-                db_dirs_to_check = [
-                    db_incdir.replace("include", 'lib64'),
-                    db_incdir.replace("include", 'lib'),
-                ]
-
-                if host_platform != 'darwin':
-                    db_dirs_to_check = filter(os.path.isdir, db_dirs_to_check)
-
-                else:
-                    # Same as other branch, but takes OSX SDK into account
-                    tmp = []
-                    for dn in db_dirs_to_check:
-                        if is_macosx_sdk_path(dn):
-                            if os.path.isdir(os.path.join(sysroot, dn[1:])):
-                                tmp.append(dn)
-                        else:
-                            if os.path.isdir(dn):
-                                tmp.append(dn)
-                    db_dirs_to_check = tmp
-
-                # Look for a version specific db-X.Y before an ambiguous dbX
-                # XXX should we -ever- look for a dbX name?  Do any
-                # systems really not name their library by version and
-                # symlink to more general names?
-                for dblib in (('db-%d.%d' % db_ver),
-                              ('db%d%d' % db_ver),
-                              ('db%d' % db_ver[0])):
-                    dblib_file = self.compiler.find_library_file(
-                                    db_dirs_to_check + lib_dirs, dblib )
-                    if dblib_file:
-                        dblib_dir = [ os.path.abspath(os.path.dirname(dblib_file)) ]
-                        raise db_found
-                    else:
-                        if db_setup_debug: print "db lib: ", dblib, "not found"
-
-        except db_found:
-            if db_setup_debug:
-                print "bsddb using BerkeleyDB lib:", db_ver, dblib
-                print "bsddb lib dir:", dblib_dir, " inc dir:", db_incdir
-            db_incs = [db_incdir]
-            dblibs = [dblib]
-            # We add the runtime_library_dirs argument because the
-            # BerkeleyDB lib we're linking against often isn't in the
-            # system dynamic library search path.  This is usually
-            # correct and most trouble free, but may cause problems in
-            # some unusual system configurations (e.g. the directory
-            # is on an NFS server that goes away).
-            exts.append(Extension('_bsddb', ['_bsddb.c'],
-                                  depends = ['bsddb.h'],
-                                  library_dirs=dblib_dir,
-                                  runtime_library_dirs=dblib_dir,
-                                  include_dirs=db_incs,
-                                  libraries=dblibs))
-        else:
-            if db_setup_debug: print "db: no appropriate library found"
-            db_incs = None
-            dblibs = []
-            dblib_dir = None
-            missing.append('_bsddb')
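The header sniffing inside the loop above reduces to a small helper; a sketch against an arbitrary db.h path:

```python
import re

def db_version(db_h_path):
    # Pull (major, minor) out of a Berkeley DB header with the same
    # regexes the scan above uses.
    text = open(db_h_path).read()
    major = re.search(r"#define\WDB_VERSION_MAJOR\W(\d+)", text)
    minor = re.search(r"#define\WDB_VERSION_MINOR\W(\d+)", text)
    if major and minor:
        return int(major.group(1)), int(minor.group(1))
    return None
```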
-
-        # The sqlite interface
-        sqlite_setup_debug = False   # verbose debug prints from this script?
-
-        # We hunt for #define SQLITE_VERSION "n.n.n"
-        # We need to find >= sqlite version 3.0.8
-        sqlite_incdir = sqlite_libdir = None
-        sqlite_inc_paths = [ '/usr/include',
-                             '/usr/include/sqlite',
-                             '/usr/include/sqlite3',
-                             '/usr/local/include',
-                             '/usr/local/include/sqlite',
-                             '/usr/local/include/sqlite3',
-                           ]
-        if cross_compiling:
-            sqlite_inc_paths = []
-        MIN_SQLITE_VERSION_NUMBER = (3, 0, 8)
-        MIN_SQLITE_VERSION = ".".join([str(x)
-                                    for x in MIN_SQLITE_VERSION_NUMBER])
-
-        # Scan the default include directories before the SQLite specific
-        # ones. This allows one to override the copy of sqlite on OSX,
-        # where /usr/include contains an old version of sqlite.
-        if host_platform == 'darwin':
-            sysroot = macosx_sdk_root()
-
-        for d_ in inc_dirs + sqlite_inc_paths:
-            d = d_
-            if host_platform == 'darwin' and is_macosx_sdk_path(d):
-                d = os.path.join(sysroot, d[1:])
-
-            f = os.path.join(d, "sqlite3.h")
-            if os.path.exists(f):
-                if sqlite_setup_debug: print "sqlite: found %s"%f
-                incf = open(f).read()
-                m = re.search(
-                    r'\s*.*#\s*.*define\s.*SQLITE_VERSION\W*"([\d\.]*)"', incf)
-                if m:
-                    sqlite_version = m.group(1)
-                    sqlite_version_tuple = tuple([int(x)
-                                        for x in sqlite_version.split(".")])
-                    if sqlite_version_tuple >= MIN_SQLITE_VERSION_NUMBER:
-                        # we win!
-                        if sqlite_setup_debug:
-                            print "%s/sqlite3.h: version %s"%(d, sqlite_version)
-                        sqlite_incdir = d
-                        break
-                    else:
-                        if sqlite_setup_debug:
-                            print "%s: version %d is too old, need >= %s"%(d,
-                                        sqlite_version, MIN_SQLITE_VERSION)
-                elif sqlite_setup_debug:
-                    print "sqlite: %s had no SQLITE_VERSION"%(f,)
-
-        if sqlite_incdir:
-            sqlite_dirs_to_check = [
-                os.path.join(sqlite_incdir, '..', 'lib64'),
-                os.path.join(sqlite_incdir, '..', 'lib'),
-                os.path.join(sqlite_incdir, '..', '..', 'lib64'),
-                os.path.join(sqlite_incdir, '..', '..', 'lib'),
-            ]
-            sqlite_libfile = self.compiler.find_library_file(
-                                sqlite_dirs_to_check + lib_dirs, 'sqlite3')
-            if sqlite_libfile:
-                sqlite_libdir = [os.path.abspath(os.path.dirname(sqlite_libfile))]
-
-        if sqlite_incdir and sqlite_libdir:
-            sqlite_srcs = ['_sqlite/cache.c',
-                '_sqlite/connection.c',
-                '_sqlite/cursor.c',
-                '_sqlite/microprotocols.c',
-                '_sqlite/module.c',
-                '_sqlite/prepare_protocol.c',
-                '_sqlite/row.c',
-                '_sqlite/statement.c',
-                '_sqlite/util.c', ]
-
-            sqlite_defines = []
-            if host_platform != "win32":
-                sqlite_defines.append(('MODULE_NAME', '"sqlite3"'))
-            else:
-                sqlite_defines.append(('MODULE_NAME', '\\"sqlite3\\"'))
-
-            # Comment this out if you want the sqlite3 module to be able to load extensions.
-            sqlite_defines.append(("SQLITE_OMIT_LOAD_EXTENSION", "1"))
-
-            if host_platform == 'darwin':
-                # In every directory on the search path search for a dynamic
-                # library and then a static library, instead of first looking
-                # for dynamic libraries on the entire path.
-                # This way a statically linked custom sqlite gets picked up
-                # before the dynamic library in /usr/lib.
-                sqlite_extra_link_args = ('-Wl,-search_paths_first',)
-            else:
-                sqlite_extra_link_args = ()
-
-            exts.append(Extension('_sqlite3', sqlite_srcs,
-                                  define_macros=sqlite_defines,
-                                  include_dirs=["Modules/_sqlite",
-                                                sqlite_incdir],
-                                  library_dirs=sqlite_libdir,
-                                  extra_link_args=sqlite_extra_link_args,
-                                  libraries=["sqlite3",]))
-        else:
-            missing.append('_sqlite3')
-
-        # Look for Berkeley db 1.85.   Note that it is built as a different
-        # module name so it can be included even when later versions are
-        # available.  A very restrictive search is performed to avoid
-        # accidentally building this module with a later version of the
-        # underlying db library.  Many BSD-ish Unixes incorporate db 1.85
-        # symbols into libc and place the include file in /usr/include.
-        #
-        # If the better bsddb library can be built (db_incs is defined)
-        # we do not build this one.  Otherwise this build will pick up
-        # the more recent berkeleydb's db.h file first in the include path
-        # when attempting to compile and it will fail.
-        f = "/usr/include/db.h"
-
-        if host_platform == 'darwin':
-            if is_macosx_sdk_path(f):
-                sysroot = macosx_sdk_root()
-                f = os.path.join(sysroot, f[1:])
-
-        if os.path.exists(f) and not db_incs:
-            data = open(f).read()
-            m = re.search(r"#s*define\s+HASHVERSION\s+2\s*", data)
-            if m is not None:
-                # bingo - old version used hash file format version 2
-                ### XXX this should be fixed to not be platform-dependent
-                ### but I don't have direct access to an osf1 platform and
-                ### seemed to be muffing the search somehow
-                libraries = host_platform == "osf1" and ['db'] or None
-                if libraries is not None:
-                    exts.append(Extension('bsddb185', ['bsddbmodule.c'],
-                                          libraries=libraries))
-                else:
-                    exts.append(Extension('bsddb185', ['bsddbmodule.c']))
-            else:
-                missing.append('bsddb185')
-        else:
-            missing.append('bsddb185')
-
-        dbm_order = ['gdbm']
-        # The standard Unix dbm module:
-        if host_platform not in ['cygwin']:
-            config_args = [arg.strip("'")
-                           for arg in sysconfig.get_config_var("CONFIG_ARGS").split()]
-            dbm_args = [arg for arg in config_args
-                        if arg.startswith('--with-dbmliborder=')]
-            if dbm_args:
-                dbm_order = [arg.split('=')[-1] for arg in dbm_args][-1].split(":")
-            else:
-                dbm_order = "ndbm:gdbm:bdb".split(":")
-            dbmext = None
-            for cand in dbm_order:
-                if cand == "ndbm":
-                    if find_file("ndbm.h", inc_dirs, []) is not None:
-                        # Some systems have -lndbm, others have -lgdbm_compat,
-                        # others don't have either
-                        if self.compiler.find_library_file(lib_dirs,
-                                                               'ndbm'):
-                            ndbm_libs = ['ndbm']
-                        elif self.compiler.find_library_file(lib_dirs,
-                                                             'gdbm_compat'):
-                            ndbm_libs = ['gdbm_compat']
-                        else:
-                            ndbm_libs = []
-                        print "building dbm using ndbm"
-                        dbmext = Extension('dbm', ['dbmmodule.c'],
-                                           define_macros=[
-                                               ('HAVE_NDBM_H',None),
-                                               ],
-                                           libraries=ndbm_libs)
-                        break
-
-                elif cand == "gdbm":
-                    if self.compiler.find_library_file(lib_dirs, 'gdbm'):
-                        gdbm_libs = ['gdbm']
-                        if self.compiler.find_library_file(lib_dirs,
-                                                               'gdbm_compat'):
-                            gdbm_libs.append('gdbm_compat')
-                        if find_file("gdbm/ndbm.h", inc_dirs, []) is not None:
-                            print "building dbm using gdbm"
-                            dbmext = Extension(
-                                'dbm', ['dbmmodule.c'],
-                                define_macros=[
-                                    ('HAVE_GDBM_NDBM_H', None),
-                                    ],
-                                libraries = gdbm_libs)
-                            break
-                        if find_file("gdbm-ndbm.h", inc_dirs, []) is not None:
-                            print "building dbm using gdbm"
-                            dbmext = Extension(
-                                'dbm', ['dbmmodule.c'],
-                                define_macros=[
-                                    ('HAVE_GDBM_DASH_NDBM_H', None),
-                                    ],
-                                libraries = gdbm_libs)
-                            break
-                elif cand == "bdb":
-                    if db_incs is not None:
-                        print "building dbm using bdb"
-                        dbmext = Extension('dbm', ['dbmmodule.c'],
-                                           library_dirs=dblib_dir,
-                                           runtime_library_dirs=dblib_dir,
-                                           include_dirs=db_incs,
-                                           define_macros=[
-                                               ('HAVE_BERKDB_H', None),
-                                               ('DB_DBM_HSEARCH', None),
-                                               ],
-                                           libraries=dblibs)
-                        break
-            if dbmext is not None:
-                exts.append(dbmext)
-            else:
-                missing.append('dbm')
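The ordering logic at the top of this block is easy to exercise in isolation; a sketch with made-up configure arguments:

```python
def dbm_preference(config_args):
    # Last --with-dbmliborder wins; otherwise the ndbm:gdbm:bdb default.
    dbm_args = [arg for arg in config_args
                if arg.startswith('--with-dbmliborder=')]
    if dbm_args:
        return dbm_args[-1].split('=')[-1].split(':')
    return ['ndbm', 'gdbm', 'bdb']

print(dbm_preference(['--prefix=/opt/py',
                      '--with-dbmliborder=gdbm:ndbm']))
# -> ['gdbm', 'ndbm']
```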
-
-        # Anthony Baxter's gdbm module.  GNU dbm(3) will require -lgdbm:
-        if ('gdbm' in dbm_order and
-            self.compiler.find_library_file(lib_dirs, 'gdbm')):
-            exts.append( Extension('gdbm', ['gdbmmodule.c'],
-                                   libraries = ['gdbm'] ) )
-        else:
-            missing.append('gdbm')
-
-        # Unix-only modules
-        if host_platform not in ['win32']:
-            # Steen Lumholt's termios module
-            exts.append( Extension('termios', ['termios.c']) )
-            # Jeremy Hylton's rlimit interface
-            if host_platform not in ['atheos']:
-                exts.append( Extension('resource', ['resource.c']) )
-            else:
-                missing.append('resource')
-
-            # Sun yellow pages. Some systems have the functions in libc.
-            if (host_platform not in ['cygwin', 'atheos', 'qnx6'] and
-                find_file('rpcsvc/yp_prot.h', inc_dirs, []) is not None):
-                if (self.compiler.find_library_file(lib_dirs, 'nsl')):
-                    libs = ['nsl']
-                else:
-                    libs = []
-                exts.append( Extension('nis', ['nismodule.c'],
-                                       libraries = libs) )
-            else:
-                missing.append('nis')
-        else:
-            missing.extend(['nis', 'resource', 'termios'])
-
-        # Curses support, requiring the System V version of curses, often
-        # provided by the ncurses library.
-        panel_library = 'panel'
-        curses_incs = None
-        if curses_library.startswith('ncurses'):
-            if curses_library == 'ncursesw':
-                # Bug 1464056: If _curses.so links with ncursesw,
-                # _curses_panel.so must link with panelw.
-                panel_library = 'panelw'
-            curses_libs = [curses_library]
-            curses_incs = find_file('curses.h', inc_dirs,
-                                    [os.path.join(d, 'ncursesw') for d in inc_dirs])
-            exts.append( Extension('_curses', ['_cursesmodule.c'],
-                                   include_dirs = curses_incs,
-                                   libraries = curses_libs) )
-        elif curses_library == 'curses' and host_platform != 'darwin':
-            # OSX has an old Berkeley curses, not good enough for
-            # the _curses module.
-            if (self.compiler.find_library_file(lib_dirs, 'terminfo')):
-                curses_libs = ['curses', 'terminfo']
-            elif (self.compiler.find_library_file(lib_dirs, 'termcap')):
-                curses_libs = ['curses', 'termcap']
-            else:
-                curses_libs = ['curses']
-
-            exts.append( Extension('_curses', ['_cursesmodule.c'],
-                                   libraries = curses_libs) )
-        else:
-            missing.append('_curses')
-
-        # If the curses module is enabled, check for the panel module
-        if (module_enabled(exts, '_curses') and
-            self.compiler.find_library_file(lib_dirs, panel_library)):
-            exts.append( Extension('_curses_panel', ['_curses_panel.c'],
-                                   include_dirs = curses_incs,
-                                   libraries = [panel_library] + curses_libs) )
-        else:
-            missing.append('_curses_panel')
-
-        # Andrew Kuchling's zlib module.  Note that some versions of zlib
-        # 1.1.3 have security problems.  See CERT Advisory CA-2002-07:
-        # http://www.cert.org/advisories/CA-2002-07.html
-        #
-        # zlib 1.1.4 is fixed, but at least one vendor (RedHat) has decided to
-        # patch its zlib 1.1.3 package instead of upgrading to 1.1.4.  For
-        # now, we still accept 1.1.3, because we think it's difficult to
-        # exploit this in Python, and we'd rather make it RedHat's problem
-        # than our problem <wink>.
-        #
-        # You can upgrade zlib to version 1.1.4 yourself by going to
-        # http://www.gzip.org/zlib/
-        zlib_inc = find_file('zlib.h', [], inc_dirs)
-        have_zlib = False
-        if zlib_inc is not None:
-            zlib_h = zlib_inc[0] + '/zlib.h'
-            version = '"0.0.0"'
-            version_req = '"1.1.3"'
-            if host_platform == 'darwin' and is_macosx_sdk_path(zlib_h):
-                zlib_h = os.path.join(macosx_sdk_root(), zlib_h[1:])
-            fp = open(zlib_h)
-            while 1:
-                line = fp.readline()
-                if not line:
-                    break
-                if line.startswith('#define ZLIB_VERSION'):
-                    version = line.split()[2]
-                    break
-            if version >= version_req:
-                if (self.compiler.find_library_file(lib_dirs, 'z')):
-                    if host_platform == "darwin":
-                        zlib_extra_link_args = ('-Wl,-search_paths_first',)
-                    else:
-                        zlib_extra_link_args = ()
-                    exts.append( Extension('zlib', ['zlibmodule.c'],
-                                           libraries = ['z'],
-                                           extra_link_args = zlib_extra_link_args))
-                    have_zlib = True
-                else:
-                    missing.append('zlib')
-            else:
-                missing.append('zlib')
-        else:
-            missing.append('zlib')
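Note that the gate above compares the quoted version strings lexicographically, which happens to work for the zlib versions in the wild but would misread a hypothetical "1.10.x" as older than "1.1.3"; a tuple-based sketch (header path assumed):

```python
def zlib_version(zlib_h='/usr/include/zlib.h'):
    with open(zlib_h) as fp:
        for line in fp:
            if line.startswith('#define ZLIB_VERSION'):
                ver = line.split()[2].strip('"')
                return tuple(int(p) for p in ver.split('.') if p.isdigit())
    return ()

print(zlib_version() >= (1, 1, 3))   # the 1.1.3 floor accepted above
```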
-
-        # Helper module for various ascii-encoders.  Uses zlib for an optimized
-        # crc32 if we have it.  Otherwise binascii uses its own.
-        if have_zlib:
-            extra_compile_args = ['-DUSE_ZLIB_CRC32']
-            libraries = ['z']
-            extra_link_args = zlib_extra_link_args
-        else:
-            extra_compile_args = []
-            libraries = []
-            extra_link_args = []
-        exts.append( Extension('binascii', ['binascii.c'],
-                               extra_compile_args = extra_compile_args,
-                               libraries = libraries,
-                               extra_link_args = extra_link_args) )
-
-        # Gustavo Niemeyer's bz2 module.
-        if (self.compiler.find_library_file(lib_dirs, 'bz2')):
-            if host_platform == "darwin":
-                bz2_extra_link_args = ('-Wl,-search_paths_first',)
-            else:
-                bz2_extra_link_args = ()
-            exts.append( Extension('bz2', ['bz2module.c'],
-                                   libraries = ['bz2'],
-                                   extra_link_args = bz2_extra_link_args) )
-        else:
-            missing.append('bz2')
-
-        # Interface to the Expat XML parser
-        #
-        # Expat was written by James Clark and is now maintained by a group of
-        # developers on SourceForge; see www.libexpat.org for more information.
-        # The pyexpat module was written by Paul Prescod after a prototype by
-        # Jack Jansen.  The Expat source is included in Modules/expat/.  Usage
-        # of a system shared libexpat.so is possible with --with-system-expat
-        # configure option.
-        #
-        # More information on Expat can be found at www.libexpat.org.
-        #
-        if '--with-system-expat' in sysconfig.get_config_var("CONFIG_ARGS"):
-            expat_inc = []
-            define_macros = []
-            expat_lib = ['expat']
-            expat_sources = []
-            expat_depends = []
-        else:
-            expat_inc = [os.path.join(os.getcwd(), srcdir, 'Modules', 'expat')]
-            define_macros = [
-                ('HAVE_EXPAT_CONFIG_H', '1'),
-            ]
-            expat_lib = []
-            expat_sources = ['expat/xmlparse.c',
-                             'expat/xmlrole.c',
-                             'expat/xmltok.c']
-            expat_depends = ['expat/ascii.h',
-                             'expat/asciitab.h',
-                             'expat/expat.h',
-                             'expat/expat_config.h',
-                             'expat/expat_external.h',
-                             'expat/internal.h',
-                             'expat/latin1tab.h',
-                             'expat/utf8tab.h',
-                             'expat/xmlrole.h',
-                             'expat/xmltok.h',
-                             'expat/xmltok_impl.h'
-                             ]
-
-        exts.append(Extension('pyexpat',
-                              define_macros = define_macros,
-                              include_dirs = expat_inc,
-                              libraries = expat_lib,
-                              sources = ['pyexpat.c'] + expat_sources,
-                              depends = expat_depends,
-                              ))
-
-        # Fredrik Lundh's cElementTree module.  Note that this also
-        # uses expat (via the CAPI hook in pyexpat).
-
-        if os.path.isfile(os.path.join(srcdir, 'Modules', '_elementtree.c')):
-            define_macros.append(('USE_PYEXPAT_CAPI', None))
-            exts.append(Extension('_elementtree',
-                                  define_macros = define_macros,
-                                  include_dirs = expat_inc,
-                                  libraries = expat_lib,
-                                  sources = ['_elementtree.c'],
-                                  depends = ['pyexpat.c'] + expat_sources +
-                                      expat_depends,
-                                  ))
-        else:
-            missing.append('_elementtree')
-
-        # Hye-Shik Chang's CJKCodecs modules.
-        if have_unicode:
-            exts.append(Extension('_multibytecodec',
-                                  ['cjkcodecs/multibytecodec.c']))
-            for loc in ('kr', 'jp', 'cn', 'tw', 'hk', 'iso2022'):
-                exts.append(Extension('_codecs_%s' % loc,
-                                      ['cjkcodecs/_codecs_%s.c' % loc]))
-        else:
-            missing.append('_multibytecodec')
-            for loc in ('kr', 'jp', 'cn', 'tw', 'hk', 'iso2022'):
-                missing.append('_codecs_%s' % loc)
-
-        # Dynamic loading module
-        if sys.maxint == 0x7fffffff:
-            # This requires sizeof(int) == sizeof(long) == sizeof(char*)
-            dl_inc = find_file('dlfcn.h', [], inc_dirs)
-            if (dl_inc is not None) and (host_platform not in ['atheos']):
-                exts.append( Extension('dl', ['dlmodule.c']) )
-            else:
-                missing.append('dl')
-        else:
-            missing.append('dl')
-
-        # Thomas Heller's _ctypes module
-        self.detect_ctypes(inc_dirs, lib_dirs)
-
-        # Richard Oudkerk's multiprocessing module
-        if host_platform == 'win32':             # Windows
-            macros = dict()
-            libraries = ['ws2_32']
-
-        elif host_platform == 'darwin':          # Mac OSX
-            macros = dict()
-            libraries = []
-
-        elif host_platform == 'cygwin':          # Cygwin
-            macros = dict()
-            libraries = []
-
-        elif host_platform in ('freebsd4', 'freebsd5', 'freebsd6', 'freebsd7', 'freebsd8'):
-            # FreeBSD's P1003.1b semaphore support is very experimental
-            # and has many known problems. (as of June 2008)
-            macros = dict()
-            libraries = []
-
-        elif host_platform.startswith('openbsd'):
-            macros = dict()
-            libraries = []
-
-        elif host_platform.startswith('netbsd'):
-            macros = dict()
-            libraries = []
-
-        else:                                   # Linux and other unices
-            macros = dict()
-            libraries = ['rt']
-
-        if host_platform == 'win32':
-            multiprocessing_srcs = [ '_multiprocessing/multiprocessing.c',
-                                     '_multiprocessing/semaphore.c',
-                                     '_multiprocessing/pipe_connection.c',
-                                     '_multiprocessing/socket_connection.c',
-                                     '_multiprocessing/win32_functions.c'
-                                   ]
-
-        else:
-            multiprocessing_srcs = [ '_multiprocessing/multiprocessing.c',
-                                     '_multiprocessing/socket_connection.c'
-                                   ]
-            if (sysconfig.get_config_var('HAVE_SEM_OPEN') and not
-                sysconfig.get_config_var('POSIX_SEMAPHORES_NOT_ENABLED')):
-                multiprocessing_srcs.append('_multiprocessing/semaphore.c')
-
-        if sysconfig.get_config_var('WITH_THREAD'):
-            exts.append ( Extension('_multiprocessing', multiprocessing_srcs,
-                                    define_macros=macros.items(),
-                                    include_dirs=["Modules/_multiprocessing"]))
-        else:
-            missing.append('_multiprocessing')
-
-        # End multiprocessing
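The platform dispatch above collapses to one small function, since only the link libraries actually vary; a sketch:

```python
def multiprocessing_libs(host_platform):
    # Mirrors the if/elif ladder above: Windows needs Winsock, the
    # BSDs and Mac/Cygwin need nothing, generic Unix needs librt.
    if host_platform == 'win32':
        return ['ws2_32']
    if (host_platform in ('darwin', 'cygwin')
            or host_platform.startswith(('freebsd', 'openbsd', 'netbsd'))):
        return []
    return ['rt']
```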
-
-
-        # Platform-specific libraries
-        if host_platform == 'linux2':
-            # Linux-specific modules
-            exts.append( Extension('linuxaudiodev', ['linuxaudiodev.c']) )
-        else:
-            missing.append('linuxaudiodev')
-
-        if (host_platform in ('linux2', 'freebsd4', 'freebsd5', 'freebsd6',
-                        'freebsd7', 'freebsd8')
-            or host_platform.startswith("gnukfreebsd")):
-            exts.append( Extension('ossaudiodev', ['ossaudiodev.c']) )
-        else:
-            missing.append('ossaudiodev')
-
-        if host_platform == 'sunos5':
-            # SunOS specific modules
-            exts.append( Extension('sunaudiodev', ['sunaudiodev.c']) )
-        else:
-            missing.append('sunaudiodev')
-
-        if host_platform == 'darwin':
-            # _scproxy
-            exts.append(Extension("_scproxy", [os.path.join(srcdir, "Mac/Modules/_scproxy.c")],
-                extra_link_args= [
-                    '-framework', 'SystemConfiguration',
-                    '-framework', 'CoreFoundation'
-                ]))
-
-
-        if host_platform == 'darwin' and ("--disable-toolbox-glue" not in
-                sysconfig.get_config_var("CONFIG_ARGS")):
-
-            if int(os.uname()[2].split('.')[0]) >= 8:
-                # We're on Mac OS X 10.4 or later, so the compiler should
-                # support '-Wno-deprecated-declarations'. This will
-                # suppress deprecation warnings for the Carbon extensions,
-                # these extensions wrap the Carbon APIs and even those
-                # parts that are deprecated.
-                carbon_extra_compile_args = ['-Wno-deprecated-declarations']
-            else:
-                carbon_extra_compile_args = []
-
-            # Mac OS X specific modules.
-            def macSrcExists(name1, name2=''):
-                if not name1:
-                    return None
-                names = (name1,)
-                if name2:
-                    names = (name1, name2)
-                path = os.path.join(srcdir, 'Mac', 'Modules', *names)
-                return os.path.exists(path)
-
-            def addMacExtension(name, kwds, extra_srcs=[]):
-                dirname = ''
-                if name[0] == '_':
-                    dirname = name[1:].lower()
-                cname = name + '.c'
-                cmodulename = name + 'module.c'
-                # Check for NNN.c, NNNmodule.c, _nnn/NNN.c, _nnn/NNNmodule.c
-                if macSrcExists(cname):
-                    srcs = [cname]
-                elif macSrcExists(cmodulename):
-                    srcs = [cmodulename]
-                elif macSrcExists(dirname, cname):
-                    # XXX(nnorwitz): If all the names ended with module, we
-                    # wouldn't need this condition.  ibcarbon is the only one.
-                    srcs = [os.path.join(dirname, cname)]
-                elif macSrcExists(dirname, cmodulename):
-                    srcs = [os.path.join(dirname, cmodulename)]
-                else:
-                    raise RuntimeError("%s not found" % name)
-
-                # Here's the whole point:  add the extension with sources
-                exts.append(Extension(name, srcs + extra_srcs, **kwds))
-
-            # Core Foundation
-            core_kwds = {'extra_compile_args': carbon_extra_compile_args,
-                         'extra_link_args': ['-framework', 'CoreFoundation'],
-                        }
-            addMacExtension('_CF', core_kwds, ['cf/pycfbridge.c'])
-            addMacExtension('autoGIL', core_kwds)
-
-
-
-            # Carbon
-            carbon_kwds = {'extra_compile_args': carbon_extra_compile_args,
-                           'extra_link_args': ['-framework', 'Carbon'],
-                          }
-            CARBON_EXTS = ['ColorPicker', 'gestalt', 'MacOS', 'Nav',
-                           'OSATerminology', 'icglue',
-                           # All these are in subdirs
-                           '_AE', '_AH', '_App', '_CarbonEvt', '_Cm', '_Ctl',
-                           '_Dlg', '_Drag', '_Evt', '_File', '_Folder', '_Fm',
-                           '_Help', '_Icn', '_IBCarbon', '_List',
-                           '_Menu', '_Mlte', '_OSA', '_Res', '_Qd', '_Qdoffs',
-                           '_Scrap', '_Snd', '_TE',
-                          ]
-            for name in CARBON_EXTS:
-                addMacExtension(name, carbon_kwds)
-
-            # Workaround for a bug in the version of gcc shipped with Xcode 3.
-            # The _Win extension should build just like the other Carbon extensions, but
-            # this actually results in a hard crash of the linker.
-            #
-            if '-arch ppc64' in cflags and '-arch ppc' in cflags:
-                win_kwds = {'extra_compile_args': carbon_extra_compile_args + ['-arch', 'i386', '-arch', 'ppc'],
-                               'extra_link_args': ['-framework', 'Carbon', '-arch', 'i386', '-arch', 'ppc'],
-                           }
-                addMacExtension('_Win', win_kwds)
-            else:
-                addMacExtension('_Win', carbon_kwds)
-
-
-            # Application Services & QuickTime
-            app_kwds = {'extra_compile_args': carbon_extra_compile_args,
-                        'extra_link_args': ['-framework','ApplicationServices'],
-                       }
-            addMacExtension('_Launch', app_kwds)
-            addMacExtension('_CG', app_kwds)
-
-            exts.append( Extension('_Qt', ['qt/_Qtmodule.c'],
-                        extra_compile_args=carbon_extra_compile_args,
-                        extra_link_args=['-framework', 'QuickTime',
-                                     '-framework', 'Carbon']) )
-
-
-        self.extensions.extend(exts)
-
-        # Call the method for detecting whether _tkinter can be compiled
-        self.detect_tkinter(inc_dirs, lib_dirs)
-
-        if '_tkinter' not in [e.name for e in self.extensions]:
-            missing.append('_tkinter')
-
-##         # Uncomment these lines if you want to play with xxmodule.c
-##         ext = Extension('xx', ['xxmodule.c'])
-##         self.extensions.append(ext)
-
-        return missing
-
-    def detect_tkinter_explicitly(self):
-        # Build _tkinter using explicit locations for Tcl/Tk.
-        #
-        # This is enabled when both arguments are given to ./configure:
-        #
-        #     --with-tcltk-includes="-I/path/to/tclincludes \
-        #                            -I/path/to/tkincludes"
-        #     --with-tcltk-libs="-L/path/to/tcllibs -ltclm.n \
-        #                        -L/path/to/tklibs -ltkm.n"
-        #
-        # These values can also be specified or overridden via make:
-        #    make TCLTK_INCLUDES="..." TCLTK_LIBS="..."
-        #
-        # This can be useful for building and testing tkinter with multiple
-        # versions of Tcl/Tk.  Note that a build of Tk depends on a particular
-        # build of Tcl so you need to specify both arguments and use care when
-        # overriding.
-
-        # The _TCLTK variables are created in the Makefile sharedmods target.
-        tcltk_includes = os.environ.get('_TCLTK_INCLUDES')
-        tcltk_libs = os.environ.get('_TCLTK_LIBS')
-        if not (tcltk_includes and tcltk_libs):
-            # Resume default configuration search.
-            return 0
-
-        extra_compile_args = tcltk_includes.split()
-        extra_link_args = tcltk_libs.split()
-        ext = Extension('_tkinter', ['_tkinter.c', 'tkappinit.c'],
-                        define_macros=[('WITH_APPINIT', 1)],
-                        extra_compile_args = extra_compile_args,
-                        extra_link_args = extra_link_args,
-                        )
-        self.extensions.append(ext)
-        return 1
-
-    def detect_tkinter_darwin(self, inc_dirs, lib_dirs):
-        # The _tkinter module, using frameworks. Since frameworks are quite
-        # different, the UNIX search logic is not sharable.
-        from os.path import join, exists
-        framework_dirs = [
-            '/Library/Frameworks',
-            '/System/Library/Frameworks/',
-            join(os.getenv('HOME'), '/Library/Frameworks')
-        ]
-
-        sysroot = macosx_sdk_root()
-
-        # Find the directory that contains the Tcl.framework and Tk.framework
-        # bundles.
-        # XXX distutils should support -F!
-        for F in framework_dirs:
-            # both Tcl.framework and Tk.framework should be present
-
-
-            for fw in 'Tcl', 'Tk':
-                if is_macosx_sdk_path(F):
-                    if not exists(join(sysroot, F[1:], fw + '.framework')):
-                        break
-                else:
-                    if not exists(join(F, fw + '.framework')):
-                        break
-            else:
-                # ok, F is now the directory with both frameworks. Continue
-                # building.
-                break
-        else:
-            # Tk and Tcl frameworks not found. Normal "unix" tkinter search
-            # will now resume.
-            return 0
-
-        # For 8.4a2, we must add -I options that point inside the Tcl and Tk
-        # frameworks. In a later release we should hopefully be able to pass
-        # the -F option to gcc, which specifies a framework lookup path.
-        #
-        include_dirs = [
-            join(F, fw + '.framework', H)
-            for fw in 'Tcl', 'Tk'
-            for H in 'Headers', 'Versions/Current/PrivateHeaders'
-        ]
-
-        # For 8.4a2, the X11 headers are not included. Rather than include a
-        # complicated search, this is a hard-coded path. It could bail out
-        # if X11 libs are not found...
-        include_dirs.append('/usr/X11R6/include')
-        frameworks = ['-framework', 'Tcl', '-framework', 'Tk']
-
-        # All existing framework builds of Tcl/Tk don't support 64-bit
-        # architectures.
-        cflags = sysconfig.get_config_vars('CFLAGS')[0]
-        archs = re.findall(r'-arch\s+(\w+)', cflags)
-
-        if is_macosx_sdk_path(F):
-            fp = os.popen("file %s/Tk.framework/Tk | grep 'for architecture'"%(os.path.join(sysroot, F[1:]),))
-        else:
-            fp = os.popen("file %s/Tk.framework/Tk | grep 'for architecture'"%(F,))
-
-        detected_archs = []
-        for ln in fp:
-            a = ln.split()[-1]
-            if a in archs:
-                detected_archs.append(ln.split()[-1])
-        fp.close()
-
-        for a in detected_archs:
-            frameworks.append('-arch')
-            frameworks.append(a)
-
-        ext = Extension('_tkinter', ['_tkinter.c', 'tkappinit.c'],
-                        define_macros=[('WITH_APPINIT', 1)],
-                        include_dirs = include_dirs,
-                        libraries = [],
-                        extra_compile_args = frameworks[2:],
-                        extra_link_args = frameworks,
-                        )
-        self.extensions.append(ext)
-        return 1
-
-    def detect_tkinter(self, inc_dirs, lib_dirs):
-        # The _tkinter module.
-
-        # Check whether --with-tcltk-includes and --with-tcltk-libs were
-        # configured or passed into the make target.  If so, use these values
-        # to build tkinter and bypass the searches for Tcl and TK in standard
-        # locations.
-        if self.detect_tkinter_explicitly():
-            return
-
-        # Rather than complicate the code below, detecting and building
-        # AquaTk is a separate method. Only one Tkinter will be built on
-        # Darwin - either AquaTk, if it is found, or X11 based Tk.
-        if (host_platform == 'darwin' and
-            self.detect_tkinter_darwin(inc_dirs, lib_dirs)):
-            return
-
-        # Assume we haven't found any of the libraries or include files
-        # The versions with dots are used on Unix, and the versions without
-        # dots on Windows, for detection by cygwin.
-        tcllib = tklib = tcl_includes = tk_includes = None
-        for version in ['8.6', '86', '8.5', '85', '8.4', '84', '8.3', '83',
-                        '8.2', '82', '8.1', '81', '8.0', '80']:
-            tklib = self.compiler.find_library_file(lib_dirs,
-                                                        'tk' + version)
-            tcllib = self.compiler.find_library_file(lib_dirs,
-                                                         'tcl' + version)
-            if tklib and tcllib:
-                # Exit the loop when we've found the Tcl/Tk libraries
-                break
-
-        # Now check for the header files
-        if tklib and tcllib:
-            # Check for the include files on Debian and {Free,Open}BSD, where
-            # they're put in /usr/include/{tcl,tk}X.Y
-            dotversion = version
-            if '.' not in dotversion and "bsd" in host_platform.lower():
-                # OpenBSD and FreeBSD use Tcl/Tk library names like libtcl83.a,
-                # but the include subdirs are named like .../include/tcl8.3.
-                dotversion = dotversion[:-1] + '.' + dotversion[-1]
-            tcl_include_sub = []
-            tk_include_sub = []
-            for dir in inc_dirs:
-                tcl_include_sub += [dir + os.sep + "tcl" + dotversion]
-                tk_include_sub += [dir + os.sep + "tk" + dotversion]
-            tk_include_sub += tcl_include_sub
-            tcl_includes = find_file('tcl.h', inc_dirs, tcl_include_sub)
-            tk_includes = find_file('tk.h', inc_dirs, tk_include_sub)
-
-        if (tcllib is None or tklib is None or
-            tcl_includes is None or tk_includes is None):
-            self.announce("INFO: Can't locate Tcl/Tk libs and/or headers", 2)
-            return
-
-        # OK... everything seems to be present for Tcl/Tk.
-
-        include_dirs = [] ; libs = [] ; defs = [] ; added_lib_dirs = []
-        for dir in tcl_includes + tk_includes:
-            if dir not in include_dirs:
-                include_dirs.append(dir)
-
-        # Check for various platform-specific directories
-        if host_platform == 'sunos5':
-            include_dirs.append('/usr/openwin/include')
-            added_lib_dirs.append('/usr/openwin/lib')
-        elif os.path.exists('/usr/X11R6/include'):
-            include_dirs.append('/usr/X11R6/include')
-            added_lib_dirs.append('/usr/X11R6/lib64')
-            added_lib_dirs.append('/usr/X11R6/lib')
-        elif os.path.exists('/usr/X11R5/include'):
-            include_dirs.append('/usr/X11R5/include')
-            added_lib_dirs.append('/usr/X11R5/lib')
-        else:
-            # Assume default location for X11
-            include_dirs.append('/usr/X11/include')
-            added_lib_dirs.append('/usr/X11/lib')
-
-        # If Cygwin, then verify that X is installed before proceeding
-        if host_platform == 'cygwin':
-            x11_inc = find_file('X11/Xlib.h', [], include_dirs)
-            if x11_inc is None:
-                return
-
-        # Check for BLT extension
-        if self.compiler.find_library_file(lib_dirs + added_lib_dirs,
-                                               'BLT8.0'):
-            defs.append( ('WITH_BLT', 1) )
-            libs.append('BLT8.0')
-        elif self.compiler.find_library_file(lib_dirs + added_lib_dirs,
-                                                'BLT'):
-            defs.append( ('WITH_BLT', 1) )
-            libs.append('BLT')
-
-        # Add the Tcl/Tk libraries
-        libs.append('tk'+ version)
-        libs.append('tcl'+ version)
-
-        if host_platform in ['aix3', 'aix4']:
-            libs.append('ld')
-
-        # Finally, link with the X11 libraries (not appropriate on cygwin)
-        if host_platform != "cygwin":
-            libs.append('X11')
-
-        ext = Extension('_tkinter', ['_tkinter.c', 'tkappinit.c'],
-                        define_macros=[('WITH_APPINIT', 1)] + defs,
-                        include_dirs = include_dirs,
-                        libraries = libs,
-                        library_dirs = added_lib_dirs,
-                        )
-        self.extensions.append(ext)
-
-        # XXX handle these, but how to detect?
-        # *** Uncomment and edit for PIL (TkImaging) extension only:
-        #       -DWITH_PIL -I../Extensions/Imaging/libImaging  tkImaging.c \
-        # *** Uncomment and edit for TOGL extension only:
-        #       -DWITH_TOGL togl.c \
-        # *** Uncomment these for TOGL extension only:
-        #       -lGL -lGLU -lXext -lXmu \
-
-    def configure_ctypes_darwin(self, ext):
-        # Darwin (OS X) uses preconfigured files, in
-        # the Modules/_ctypes/libffi_osx directory.
-        srcdir = sysconfig.get_config_var('srcdir')
-        ffi_srcdir = os.path.abspath(os.path.join(srcdir, 'Modules',
-                                                  '_ctypes', 'libffi_osx'))
-        sources = [os.path.join(ffi_srcdir, p)
-                   for p in ['ffi.c',
-                             'x86/darwin64.S',
-                             'x86/x86-darwin.S',
-                             'x86/x86-ffi_darwin.c',
-                             'x86/x86-ffi64.c',
-                             'powerpc/ppc-darwin.S',
-                             'powerpc/ppc-darwin_closure.S',
-                             'powerpc/ppc-ffi_darwin.c',
-                             'powerpc/ppc64-darwin_closure.S',
-                             ]]
-
-        # Add .S (preprocessed assembly) to C compiler source extensions.
-        self.compiler.src_extensions.append('.S')
-
-        include_dirs = [os.path.join(ffi_srcdir, 'include'),
-                        os.path.join(ffi_srcdir, 'powerpc')]
-        ext.include_dirs.extend(include_dirs)
-        ext.sources.extend(sources)
-        return True
-
-    def configure_ctypes(self, ext):
-        if not self.use_system_libffi:
-            if host_platform == 'darwin':
-                return self.configure_ctypes_darwin(ext)
-
-            srcdir = sysconfig.get_config_var('srcdir')
-            ffi_builddir = os.path.join(self.build_temp, 'libffi')
-            ffi_srcdir = os.path.abspath(os.path.join(srcdir, 'Modules',
-                                         '_ctypes', 'libffi'))
-            ffi_configfile = os.path.join(ffi_builddir, 'fficonfig.py')
-
-            from distutils.dep_util import newer_group
-
-            config_sources = [os.path.join(ffi_srcdir, fname)
-                              for fname in os.listdir(ffi_srcdir)
-                              if os.path.isfile(os.path.join(ffi_srcdir, fname))]
-            if self.force or newer_group(config_sources,
-                                         ffi_configfile):
-                from distutils.dir_util import mkpath
-                mkpath(ffi_builddir)
-                config_args = [arg for arg in sysconfig.get_config_var("CONFIG_ARGS").split()
-                               if (('--host=' in arg) or ('--build=' in arg))]
-                if not self.verbose:
-                    config_args.append("-q")
-
-                # Pass empty CFLAGS because we'll just append the resulting
-                # CFLAGS to Python's; -g or -O2 is to be avoided.
-                cmd = "cd %s && env CFLAGS='' '%s/configure' %s" \
-                      % (ffi_builddir, ffi_srcdir, " ".join(config_args))
-
-                res = os.system(cmd)
-                if res or not os.path.exists(ffi_configfile):
-                    print "Failed to configure _ctypes module"
-                    return False
-
-            fficonfig = {}
-            with open(ffi_configfile) as f:
-                exec f in fficonfig
-
-            # Add .S (preprocessed assembly) to C compiler source extensions.
-            self.compiler.src_extensions.append('.S')
-
-            include_dirs = [os.path.join(ffi_builddir, 'include'),
-                            ffi_builddir,
-                            os.path.join(ffi_srcdir, 'src')]
-            extra_compile_args = fficonfig['ffi_cflags'].split()
-
-            ext.sources.extend(os.path.join(ffi_srcdir, f) for f in
-                               fficonfig['ffi_sources'])
-            ext.include_dirs.extend(include_dirs)
-            ext.extra_compile_args.extend(extra_compile_args)
-        return True
-
-    def detect_ctypes(self, inc_dirs, lib_dirs):
-        self.use_system_libffi = False
-        include_dirs = []
-        extra_compile_args = []
-        extra_link_args = []
-        sources = ['_ctypes/_ctypes.c',
-                   '_ctypes/callbacks.c',
-                   '_ctypes/callproc.c',
-                   '_ctypes/stgdict.c',
-                   '_ctypes/cfield.c']
-        depends = ['_ctypes/ctypes.h']
-
-        if host_platform == 'darwin':
-            sources.append('_ctypes/malloc_closure.c')
-            sources.append('_ctypes/darwin/dlfcn_simple.c')
-            extra_compile_args.append('-DMACOSX')
-            include_dirs.append('_ctypes/darwin')
-# XXX Is this still needed?
-##            extra_link_args.extend(['-read_only_relocs', 'warning'])
-
-        elif host_platform == 'sunos5':
-            # XXX This shouldn't be necessary; it appears that some
-            # of the assembler code is non-PIC (i.e. it has relocations
-            # when it shouldn't). The proper fix would be to rewrite
-            # the assembler code to be PIC.
-            # This only works with GCC; the Sun compiler likely refuses
-            # this option. If you want to compile ctypes with the Sun
-            # compiler, please research a proper solution, instead of
-            # finding some -z option for the Sun compiler.
-            extra_link_args.append('-mimpure-text')
-
-        elif host_platform.startswith('hp-ux'):
-            extra_link_args.append('-fPIC')
-
-        ext = Extension('_ctypes',
-                        include_dirs=include_dirs,
-                        extra_compile_args=extra_compile_args,
-                        extra_link_args=extra_link_args,
-                        libraries=[],
-                        sources=sources,
-                        depends=depends)
-        ext_test = Extension('_ctypes_test',
-                             sources=['_ctypes/_ctypes_test.c'])
-        self.extensions.extend([ext, ext_test])
-
-        if not '--with-system-ffi' in sysconfig.get_config_var("CONFIG_ARGS"):
-            return
-
-        if host_platform == 'darwin':
-            # OS X 10.5 comes with libffi.dylib; the include files are
-            # in /usr/include/ffi
-            inc_dirs.append('/usr/include/ffi')
-
-        ffi_inc = [sysconfig.get_config_var("LIBFFI_INCLUDEDIR")]
-        if not ffi_inc or ffi_inc[0] == '':
-            ffi_inc = find_file('ffi.h', [], inc_dirs)
-        if ffi_inc is not None:
-            ffi_h = ffi_inc[0] + '/ffi.h'
-            fp = open(ffi_h)
-            while 1:
-                line = fp.readline()
-                if not line:
-                    ffi_inc = None
-                    break
-                if line.startswith('#define LIBFFI_H'):
-                    break
-        ffi_lib = None
-        if ffi_inc is not None:
-            for lib_name in ('ffi_convenience', 'ffi_pic', 'ffi'):
-                if (self.compiler.find_library_file(lib_dirs, lib_name)):
-                    ffi_lib = lib_name
-                    break
-
-        if ffi_inc and ffi_lib:
-            ext.include_dirs.extend(ffi_inc)
-            ext.libraries.append(ffi_lib)
-            self.use_system_libffi = True
-
-
-class PyBuildInstall(install):
-    # Suppress the warning about installation into the lib_dynload
-    # directory, which is not in sys.path when running Python during
-    # installation:
-    def initialize_options (self):
-        install.initialize_options(self)
-        self.warn_dir=0
-
-class PyBuildInstallLib(install_lib):
-    # Do exactly what install_lib does but make sure correct access modes get
-    # set on installed directories and files. All installed files will get
-    # mode 644 unless they are a shared library, in which case they will get
-    # mode 755. All installed directories will get mode 755.
-
-    so_ext = sysconfig.get_config_var("SO")
-
-    def install(self):
-        outfiles = install_lib.install(self)
-        self.set_file_modes(outfiles, 0644, 0755)
-        self.set_dir_modes(self.install_dir, 0755)
-        return outfiles
-
-    def set_file_modes(self, files, defaultMode, sharedLibMode):
-        if not self.is_chmod_supported(): return
-        if not files: return
-
-        for filename in files:
-            if os.path.islink(filename): continue
-            mode = defaultMode
-            if filename.endswith(self.so_ext): mode = sharedLibMode
-            log.info("changing mode of %s to %o", filename, mode)
-            if not self.dry_run: os.chmod(filename, mode)
-
-    def set_dir_modes(self, dirname, mode):
-        if not self.is_chmod_supported(): return
-        os.path.walk(dirname, self.set_dir_modes_visitor, mode)
-
-    def set_dir_modes_visitor(self, mode, dirname, names):
-        if os.path.islink(dirname): return
-        log.info("changing mode of %s to %o", dirname, mode)
-        if not self.dry_run: os.chmod(dirname, mode)
-
-    def is_chmod_supported(self):
-        return hasattr(os, 'chmod')
-
-SUMMARY = """
-Python is an interpreted, interactive, object-oriented programming
-language. It is often compared to Tcl, Perl, Scheme or Java.
-
-Python combines remarkable power with very clear syntax. It has
-modules, classes, exceptions, very high level dynamic data types, and
-dynamic typing. There are interfaces to many system calls and
-libraries, as well as to various windowing systems (X11, Motif, Tk,
-Mac, MFC). New built-in modules are easily written in C or C++. Python
-is also usable as an extension language for applications that need a
-programmable interface.
-
-The Python implementation is portable: it runs on many brands of UNIX,
-on Windows, DOS, OS/2, Mac, Amiga... If your favorite system isn't
-listed here, it may still be supported, if there's a C compiler for
-it. Ask around on comp.lang.python -- or just try compiling Python
-yourself.
-"""
-
-CLASSIFIERS = """
-Development Status :: 6 - Mature
-License :: OSI Approved :: Python Software Foundation License
-Natural Language :: English
-Programming Language :: C
-Programming Language :: Python
-Topic :: Software Development
-"""
-
-def main():
-    # turn off warnings when deprecated modules are imported
-    import warnings
-    warnings.filterwarnings("ignore",category=DeprecationWarning)
-    setup(# PyPI Metadata (PEP 301)
-          name = "Python",
-          version = sys.version.split()[0],
-          url = "http://www.python.org/%s" % sys.version[:3],
-          maintainer = "Guido van Rossum and the Python community",
-          maintainer_email = "python-dev@python.org",
-          description = "A high-level object-oriented programming language",
-          long_description = SUMMARY.strip(),
-          license = "PSF license",
-          classifiers = filter(None, CLASSIFIERS.split("\n")),
-          platforms = ["Many"],
-
-          # Build info
-          cmdclass = {'build_ext':PyBuildExt, 'install':PyBuildInstall,
-                      'install_lib':PyBuildInstallLib},
-          # The struct module is defined here, because build_ext won't be
-          # called unless there's at least one extension module defined.
-          ext_modules=[Extension('_struct', ['_struct.c'])],
-
-          # Scripts to install
-          scripts = ['Tools/scripts/pydoc', 'Tools/scripts/idle',
-                     'Tools/scripts/2to3',
-                     'Lib/smtpd.py']
-        )
-
-# --install-platlib
-if __name__ == '__main__':
-    main()
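
The deleted setup.py locates Tcl/Tk by walking a list of candidate library
names and keeping the first one the compiler can find. A minimal sketch of
that probe-and-fall-back pattern, assuming illustrative directory and
version names rather than anything taken from the real build:

    # Sketch of the library probe the deleted setup.py uses for Tcl/Tk;
    # lib_dirs and the candidate names below are assumed values.
    from distutils.ccompiler import new_compiler

    def find_first_library(lib_dirs, candidates):
        compiler = new_compiler()
        for name in candidates:
            # find_library_file returns a path such as /usr/lib/libtk8.6.so,
            # or None when the library is absent.
            if compiler.find_library_file(lib_dirs, name):
                return name
        return None

    print(find_first_library(['/usr/lib', '/usr/local/lib'],
                             ['tk8.6', 'tk86', 'tk8.5', 'tk85']))
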
diff --git a/pysrc/src/site-2.7.7.py b/pysrc/src/site-2.7.7.py
deleted file mode 100644
index c22c48ab5ecf660f35059aa5fa5d3be8d932211f..0000000000000000000000000000000000000000
--- a/pysrc/src/site-2.7.7.py
+++ /dev/null
@@ -1,602 +0,0 @@
-"""Append module search paths for third-party packages to sys.path.
-
-****************************************************************
-* This module is automatically imported during initialization. *
-****************************************************************
-
-In earlier versions of Python (up to 1.5a3), scripts or modules that
-needed to use site-specific modules would place ``import site''
-somewhere near the top of their code.  Because of the automatic
-import, this is no longer necessary (but code that does it still
-works).
-
-This will append site-specific paths to the module search path.  On
-Unix (including Mac OSX), it starts with sys.prefix and
-sys.exec_prefix (if different) and appends
-lib/python<version>/site-packages as well as lib/site-python.
-On other platforms (such as Windows), it tries each of the
-prefixes directly, as well as with lib/site-packages appended.  The
-resulting directories, if they exist, are appended to sys.path, and
-also inspected for path configuration files.
-
-A path configuration file is a file whose name has the form
-<package>.pth; its contents are additional directories (one per line)
-to be added to sys.path.  Non-existing directories (or
-non-directories) are never added to sys.path; no directory is added to
-sys.path more than once.  Blank lines and lines beginning with
-'#' are skipped. Lines starting with 'import' are executed.
-
-For example, suppose sys.prefix and sys.exec_prefix are set to
-/usr/local and there is a directory /usr/local/lib/python2.5/site-packages
-with three subdirectories, foo, bar and spam, and two path
-configuration files, foo.pth and bar.pth.  Assume foo.pth contains the
-following:
-
-  # foo package configuration
-  foo
-  bar
-  bletch
-
-and bar.pth contains:
-
-  # bar package configuration
-  bar
-
-Then the following directories are added to sys.path, in this order:
-
-  /usr/local/lib/python2.5/site-packages/bar
-  /usr/local/lib/python2.5/site-packages/foo
-
-Note that bletch is omitted because it doesn't exist; bar precedes foo
-because bar.pth comes alphabetically before foo.pth; and spam is
-omitted because it is not mentioned in either path configuration file.
-
-After these path manipulations, an attempt is made to import a module
-named sitecustomize, which can perform arbitrary additional
-site-specific customizations.  If this import fails with an
-ImportError exception, it is silently ignored.
-
-"""
-
-import sys
-import os
-import __builtin__
-import traceback
-
-# Prefixes for site-packages; add additional prefixes like /usr/local here
-PREFIXES = [sys.prefix, sys.exec_prefix]
-# Enable per user site-packages directory
-# set it to False to disable the feature or True to force the feature
-ENABLE_USER_SITE = None
-
-# for distutils.commands.install
-# These values are initialized by the getuserbase() and getusersitepackages()
-# functions, through the main() function when Python starts.
-USER_SITE = None
-USER_BASE = None
-
-
-def makepath(*paths):
-    dir = os.path.join(*paths)
-    try:
-        dir = os.path.abspath(dir)
-    except OSError:
-        pass
-    return dir, os.path.normcase(dir)
-
-
-def abs__file__():
-    """Set all module' __file__ attribute to an absolute path"""
-    for m in sys.modules.values():
-        if hasattr(m, '__loader__'):
-            continue   # don't mess with a PEP 302-supplied __file__
-        try:
-            m.__file__ = os.path.abspath(m.__file__)
-        except (AttributeError, OSError):
-            pass
-
-
-def removeduppaths():
-    """ Remove duplicate entries from sys.path along with making them
-    absolute"""
-    # This ensures that the initial path provided by the interpreter contains
-    # only absolute pathnames, even if we're running from the build directory.
-    L = []
-    known_paths = set()
-    for dir in sys.path:
-        # Filter out duplicate paths (on case-insensitive file systems also
-        # if they only differ in case); turn relative paths into absolute
-        # paths.
-        dir, dircase = makepath(dir)
-        if not dircase in known_paths:
-            L.append(dir)
-            known_paths.add(dircase)
-    sys.path[:] = L
-    return known_paths
-
-
-def _init_pathinfo():
-    """Return a set containing all existing directory entries from sys.path"""
-    d = set()
-    for dir in sys.path:
-        try:
-            if os.path.isdir(dir):
-                dir, dircase = makepath(dir)
-                d.add(dircase)
-        except TypeError:
-            continue
-    return d
-
-
-def addpackage(sitedir, name, known_paths):
-    """Process a .pth file within the site-packages directory:
-       For each line in the file, either combine it with sitedir to a path
-       and add that to known_paths, or execute it if it starts with 'import '.
-    """
-    if known_paths is None:
-        _init_pathinfo()
-        reset = 1
-    else:
-        reset = 0
-    fullname = os.path.join(sitedir, name)
-    try:
-        f = open(fullname, "rU")
-    except IOError:
-        return
-    with f:
-        for n, line in enumerate(f):
-            if line.startswith("#"):
-                continue
-            try:
-                if line.startswith(("import ", "import\t")):
-                    exec line
-                    continue
-                line = line.rstrip()
-                dir, dircase = makepath(sitedir, line)
-                if not dircase in known_paths and os.path.exists(dir):
-                    sys.path.append(dir)
-                    known_paths.add(dircase)
-            except Exception as err:
-                print >>sys.stderr, "Error processing line {:d} of {}:\n".format(
-                    n+1, fullname)
-                for record in traceback.format_exception(*sys.exc_info()):
-                    for line in record.splitlines():
-                        print >>sys.stderr, '  '+line
-                print >>sys.stderr, "\nRemainder of file ignored"
-                break
-    if reset:
-        known_paths = None
-    return known_paths
-
-
-def addsitedir(sitedir, known_paths=None):
-    """Add 'sitedir' argument to sys.path if missing and handle .pth files in
-    'sitedir'"""
-    if known_paths is None:
-        known_paths = _init_pathinfo()
-        reset = 1
-    else:
-        reset = 0
-    sitedir, sitedircase = makepath(sitedir)
-    if not sitedircase in known_paths:
-        sys.path.append(sitedir)        # Add path component
-    try:
-        names = os.listdir(sitedir)
-    except os.error:
-        return
-    dotpth = os.extsep + "pth"
-    names = [name for name in names if name.endswith(dotpth)]
-    for name in sorted(names):
-        addpackage(sitedir, name, known_paths)
-    if reset:
-        known_paths = None
-    return known_paths
-
-
-def check_enableusersite():
-    """Check if user site directory is safe for inclusion
-
-    The function tests for the command line flag (including environment var),
-    process uid/gid equal to effective uid/gid.
-
-    None: Disabled for security reasons
-    False: Disabled by user (command line option)
-    True: Safe and enabled
-    """
-    if sys.flags.no_user_site:
-        return False
-
-    if hasattr(os, "getuid") and hasattr(os, "geteuid"):
-        # check process uid == effective uid
-        if os.geteuid() != os.getuid():
-            return None
-    if hasattr(os, "getgid") and hasattr(os, "getegid"):
-        # check process gid == effective gid
-        if os.getegid() != os.getgid():
-            return None
-
-    return True
-
-def getuserbase():
-    """Returns the `user base` directory path.
-
-    The `user base` directory can be used to store data. If the global
-    variable ``USER_BASE`` is not initialized yet, this function will also set
-    it.
-    """
-    global USER_BASE
-    if USER_BASE is not None:
-        return USER_BASE
-    from sysconfig import get_config_var
-    USER_BASE = get_config_var('userbase')
-    return USER_BASE
-
-def getusersitepackages():
-    """Returns the user-specific site-packages directory path.
-
-    If the global variable ``USER_SITE`` is not initialized yet, this
-    function will also set it.
-    """
-    global USER_SITE
-    user_base = getuserbase() # this will also set USER_BASE
-
-    if USER_SITE is not None:
-        return USER_SITE
-
-    from sysconfig import get_path
-    import os
-
-    if sys.platform == 'darwin':
-        from sysconfig import get_config_var
-        if get_config_var('PYTHONFRAMEWORK'):
-            USER_SITE = get_path('purelib', 'osx_framework_user')
-            return USER_SITE
-
-    USER_SITE = get_path('purelib', '%s_user' % os.name)
-    return USER_SITE
-
-def addusersitepackages(known_paths):
-    """Add a per user site-package to sys.path
-
-    Each user has their own python directory with site-packages in the
-    home directory.
-    """
-    # get the per user site-package path
-    # this call will also make sure USER_BASE and USER_SITE are set
-    user_site = getusersitepackages()
-
-    if ENABLE_USER_SITE and os.path.isdir(user_site):
-        addsitedir(user_site, known_paths)
-    return known_paths
-
-def getsitepackages():
-    """Returns a list containing all global site-packages directories
-    (and possibly site-python).
-
-    For each directory present in the global ``PREFIXES``, this function
-    will find its `site-packages` subdirectory depending on the system
-    environment, and will return a list of full paths.
-    """
-    sitepackages = []
-    seen = set()
-
-    for prefix in PREFIXES:
-        if not prefix or prefix in seen:
-            continue
-        seen.add(prefix)
-
-        if sys.platform in ('os2emx', 'riscos'):
-            sitepackages.append(os.path.join(prefix, "Lib", "site-packages"))
-        elif os.sep == '/':
-            sitepackages.append(os.path.join(prefix, "lib",
-                                        "python" + sys.version[:3],
-                                        "site-packages"))
-            sitepackages.append(os.path.join(prefix, "lib", "site-python"))
-        else:
-            sitepackages.append(prefix)
-            sitepackages.append(os.path.join(prefix, "lib", "site-packages"))
-        if sys.platform == "darwin":
-            # for framework builds *only* we add the standard Apple
-            # locations.
-            # DISABLED FOR UV-CDAT!
-            pass
-            #from sysconfig import get_config_var
-            #framework = get_config_var("PYTHONFRAMEWORK")
-            #if framework:
-            #    sitepackages.append(
-            #            os.path.join("/Library", framework,
-            #                sys.version[:3], "site-packages"))
-    return sitepackages
-
-def addsitepackages(known_paths):
-    """Add site-packages (and possibly site-python) to sys.path"""
-    for sitedir in getsitepackages():
-        if os.path.isdir(sitedir):
-            addsitedir(sitedir, known_paths)
-
-    return known_paths
-
-def setBEGINLIBPATH():
-    """The OS/2 EMX port has optional extension modules that do double duty
-    as DLLs (and must use the .DLL file extension) for other extensions.
-    The library search path needs to be amended so these will be found
-    during module import.  Use BEGINLIBPATH so that these are at the start
-    of the library search path.
-
-    """
-    dllpath = os.path.join(sys.prefix, "Lib", "lib-dynload")
-    libpath = os.environ['BEGINLIBPATH'].split(';')
-    if libpath[-1]:
-        libpath.append(dllpath)
-    else:
-        libpath[-1] = dllpath
-    os.environ['BEGINLIBPATH'] = ';'.join(libpath)
-
-
-def setquit():
-    """Define new builtins 'quit' and 'exit'.
-
-    These are objects which make the interpreter exit when called.
-    The repr of each object contains a hint at how it works.
-
-    """
-    if os.sep == ':':
-        eof = 'Cmd-Q'
-    elif os.sep == '\\':
-        eof = 'Ctrl-Z plus Return'
-    else:
-        eof = 'Ctrl-D (i.e. EOF)'
-
-    class Quitter(object):
-        def __init__(self, name):
-            self.name = name
-        def __repr__(self):
-            return 'Use %s() or %s to exit' % (self.name, eof)
-        def __call__(self, code=None):
-            # Shells like IDLE catch the SystemExit, but listen when their
-            # stdin wrapper is closed.
-            try:
-                sys.stdin.close()
-            except:
-                pass
-            raise SystemExit(code)
-    __builtin__.quit = Quitter('quit')
-    __builtin__.exit = Quitter('exit')
-
-
-class _Printer(object):
-    """interactive prompt objects for printing the license text, a list of
-    contributors and the copyright notice."""
-
-    MAXLINES = 23
-
-    def __init__(self, name, data, files=(), dirs=()):
-        self.__name = name
-        self.__data = data
-        self.__files = files
-        self.__dirs = dirs
-        self.__lines = None
-
-    def __setup(self):
-        if self.__lines:
-            return
-        data = None
-        for dir in self.__dirs:
-            for filename in self.__files:
-                filename = os.path.join(dir, filename)
-                try:
-                    fp = file(filename, "rU")
-                    data = fp.read()
-                    fp.close()
-                    break
-                except IOError:
-                    pass
-            if data:
-                break
-        if not data:
-            data = self.__data
-        self.__lines = data.split('\n')
-        self.__linecnt = len(self.__lines)
-
-    def __repr__(self):
-        self.__setup()
-        if len(self.__lines) <= self.MAXLINES:
-            return "\n".join(self.__lines)
-        else:
-            return "Type %s() to see the full %s text" % ((self.__name,)*2)
-
-    def __call__(self):
-        self.__setup()
-        prompt = 'Hit Return for more, or q (and Return) to quit: '
-        lineno = 0
-        while 1:
-            try:
-                for i in range(lineno, lineno + self.MAXLINES):
-                    print self.__lines[i]
-            except IndexError:
-                break
-            else:
-                lineno += self.MAXLINES
-                key = None
-                while key is None:
-                    key = raw_input(prompt)
-                    if key not in ('', 'q'):
-                        key = None
-                if key == 'q':
-                    break
-
-def setcopyright():
-    """Set 'copyright' and 'credits' in __builtin__"""
-    __builtin__.copyright = _Printer("copyright", sys.copyright)
-    if sys.platform[:4] == 'java':
-        __builtin__.credits = _Printer(
-            "credits",
-            "Jython is maintained by the Jython developers (www.jython.org).")
-    else:
-        __builtin__.credits = _Printer("credits", """\
-    Thanks to CWI, CNRI, BeOpen.com, Zope Corporation and a cast of thousands
-    for supporting Python development.  See www.python.org for more information.""")
-    here = os.path.dirname(os.__file__)
-    __builtin__.license = _Printer(
-        "license", "See http://www.python.org/%.3s/license.html" % sys.version,
-        ["LICENSE.txt", "LICENSE"],
-        [os.path.join(here, os.pardir), here, os.curdir])
-
-
-class _Helper(object):
-    """Define the builtin 'help'.
-    This is a wrapper around pydoc.help (with a twist).
-
-    """
-
-    def __repr__(self):
-        return "Type help() for interactive help, " \
-               "or help(object) for help about object."
-    def __call__(self, *args, **kwds):
-        import pydoc
-        return pydoc.help(*args, **kwds)
-
-def sethelper():
-    __builtin__.help = _Helper()
-
-def aliasmbcs():
-    """On Windows, some default encodings are not provided by Python,
-    while they are always available as "mbcs" in each locale. Make
-    them usable by aliasing to "mbcs" in such a case."""
-    if sys.platform == 'win32':
-        import locale, codecs
-        enc = locale.getdefaultlocale()[1]
-        if enc.startswith('cp'):            # "cp***" ?
-            try:
-                codecs.lookup(enc)
-            except LookupError:
-                import encodings
-                encodings._cache[enc] = encodings._unknown
-                encodings.aliases.aliases[enc] = 'mbcs'
-
-def setencoding():
-    """Set the string encoding used by the Unicode implementation.  The
-    default is 'ascii', but if you're willing to experiment, you can
-    change this."""
-    encoding = "ascii" # Default value set by _PyUnicode_Init()
-    if 0:
-        # Enable to support locale aware default string encodings.
-        import locale
-        loc = locale.getdefaultlocale()
-        if loc[1]:
-            encoding = loc[1]
-    if 0:
-        # Enable to switch off string to Unicode coercion and implicit
-        # Unicode to string conversion.
-        encoding = "undefined"
-    if encoding != "ascii":
-        # On Non-Unicode builds this will raise an AttributeError...
-        sys.setdefaultencoding(encoding) # Needs Python Unicode build !
-
-
-def execsitecustomize():
-    """Run custom site specific code, if available."""
-    try:
-        import sitecustomize
-    except ImportError:
-        pass
-    except Exception:
-        if sys.flags.verbose:
-            sys.excepthook(*sys.exc_info())
-        else:
-            print >>sys.stderr, \
-                "'import sitecustomize' failed; use -v for traceback"
-
-
-def execusercustomize():
-    """Run custom user specific code, if available."""
-    try:
-        import usercustomize
-    except ImportError:
-        pass
-    except Exception:
-        if sys.flags.verbose:
-            sys.excepthook(*sys.exc_info())
-        else:
-            print >>sys.stderr, \
-                "'import usercustomize' failed; use -v for traceback"
-
-
-def main():
-    global ENABLE_USER_SITE
-
-    abs__file__()
-    known_paths = removeduppaths()
-    if ENABLE_USER_SITE is None:
-        ENABLE_USER_SITE = check_enableusersite()
-    known_paths = addusersitepackages(known_paths)
-    known_paths = addsitepackages(known_paths)
-    if sys.platform == 'os2emx':
-        setBEGINLIBPATH()
-    setquit()
-    setcopyright()
-    sethelper()
-    aliasmbcs()
-    setencoding()
-    execsitecustomize()
-    if ENABLE_USER_SITE:
-        execusercustomize()
-    # Remove sys.setdefaultencoding() so that users cannot change the
-    # encoding after initialization.  The test for presence is needed when
-    # this module is run as a script, because this code is executed twice.
-    if hasattr(sys, "setdefaultencoding"):
-        del sys.setdefaultencoding
-
-main()
-
-def _script():
-    help = """\
-    %s [--user-base] [--user-site]
-
-    Without arguments, print some useful information.
-    With arguments, print the value of USER_BASE and/or USER_SITE separated
-    by '%s'.
-
-    Exit codes with --user-base or --user-site:
-      0 - user site directory is enabled
-      1 - user site directory is disabled by user
-      2 - user site directory is disabled by super user
-          or for security reasons
-     >2 - unknown error
-    """
-    args = sys.argv[1:]
-    if not args:
-        print "sys.path = ["
-        for dir in sys.path:
-            print "    %r," % (dir,)
-        print "]"
-        print "USER_BASE: %r (%s)" % (USER_BASE,
-            "exists" if os.path.isdir(USER_BASE) else "doesn't exist")
-        print "USER_SITE: %r (%s)" % (USER_SITE,
-            "exists" if os.path.isdir(USER_SITE) else "doesn't exist")
-        print "ENABLE_USER_SITE: %r" %  ENABLE_USER_SITE
-        sys.exit(0)
-
-    buffer = []
-    if '--user-base' in args:
-        buffer.append(USER_BASE)
-    if '--user-site' in args:
-        buffer.append(USER_SITE)
-
-    if buffer:
-        print os.pathsep.join(buffer)
-        if ENABLE_USER_SITE:
-            sys.exit(0)
-        elif ENABLE_USER_SITE is False:
-            sys.exit(1)
-        elif ENABLE_USER_SITE is None:
-            sys.exit(2)
-        else:
-            sys.exit(3)
-    else:
-        import textwrap
-        print textwrap.dedent(help % (sys.argv[0], os.pathsep))
-        sys.exit(10)
-
-if __name__ == '__main__':
-    _script()
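
The site-2.7.7.py module removed above implements the .pth mechanism its
docstring describes. A small, hedged demonstration of that behaviour
(the directory and file names are the docstring's made-up examples,
created here in a throwaway temporary directory):

    # Demo of .pth processing as described in the site module docstring:
    # comment lines are skipped, existing directories are appended to
    # sys.path, and missing ones are silently ignored.
    import os, sys, site, tempfile

    sitedir = tempfile.mkdtemp()
    pkgdir = os.path.join(sitedir, 'foo')
    os.mkdir(pkgdir)
    with open(os.path.join(sitedir, 'foo.pth'), 'w') as f:
        f.write('# foo package configuration\n')  # skipped
        f.write('foo\n')     # exists -> appended to sys.path
        f.write('bletch\n')  # missing -> ignored

    site.addsitedir(sitedir)
    assert sitedir in sys.path and pkgdir in sys.path
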
diff --git a/pysrc/tcl.sh b/pysrc/tcl.sh
deleted file mode 100755
index d8c7fbf6c20cde3c41a767f552ffe596da814813..0000000000000000000000000000000000000000
--- a/pysrc/tcl.sh
+++ /dev/null
@@ -1,25 +0,0 @@
-#!/bin/sh
-. ./prolog.sh
-# tcl
-cd tcl8*
-cd unix
-if (test "${OS}" = "Darwin") then  # MacIntosh OSX
-   ./configure --prefix=${prefix}/Externals
-else
-   ./configure --disable-shared --prefix=${prefix}/Externals
-fi
-
-if (test $? -ne 0) then
-    echo "tcl configuration failed.";
-    exit 1;
-fi
-make
-if (test $? -ne 0) then
-    echo "tcl make failed.";
-    exit 1;
-fi
-make install
-if (test $? -ne 0) then
-    echo "tcl install failed.";
-    exit 1;
-fi
diff --git a/pysrc/tk.sh b/pysrc/tk.sh
deleted file mode 100755
index 8878bbe754b1abb2a7d5d2dc7b29c1d7363e10c5..0000000000000000000000000000000000000000
--- a/pysrc/tk.sh
+++ /dev/null
@@ -1,24 +0,0 @@
-#!/bin/sh
-. ./prolog.sh
-cd tk8*
-cd unix
-if (test "${OS}" = "Darwin") then  # MacIntosh OSX
-   ./configure --prefix=${prefix}/Externals
-else
-   ./configure --disable-shared --prefix=${prefix}/Externals
-fi
-
-if (test $? -ne 0) then
-    echo "tk configuration failed.";
-    exit 1;
-fi
-make
-if (test $? -ne 0) then
-    echo "tk make failed.";
-    exit 1;
-fi
-make install
-if (test $? -ne 0) then
-    echo "tk installation failed.";
-    exit 1;
-fi
diff --git a/pysrc/zlib.sh b/pysrc/zlib.sh
deleted file mode 100755
index c2497bc94fe81f5d509e87827b0d4b4c2e479360..0000000000000000000000000000000000000000
--- a/pysrc/zlib.sh
+++ /dev/null
@@ -1,25 +0,0 @@
-#!/bin/sh
-. ./prolog.sh $*
-cd zlib-*
-CDMSARCH=`uname -m`
-if (test "${CDMSARCH}" = "ia64") then
-  export CC="gcc -fPIC"
-fi
-if (test "${CDMSARCH}" = "x86_64") then
-  export CC="gcc -fPIC"
-fi
-./configure --prefix=${prefix}/Externals
-if (test $? -ne 0) then
-    echo "zlib configuration failed.";
-    exit 1;
-fi
-make
-if (test $? -ne 0) then
-    echo "zlib make failed.";
-    exit 1;
-fi
-make install
-if (test $? -ne 0) then
-    echo "zlib installation failed.";
-    exit 1;
-fi
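
The three pysrc scripts deleted above (tcl.sh, tk.sh, zlib.sh) repeat the
same configure, make, make install sequence with an exit-on-failure check
after each step. A sketch of that sequence in Python, where the source
directory and install prefix are illustrative assumptions:

    # configure/make/make install with fail-fast checks, mirroring the
    # deleted pysrc shell scripts; src and prefix are assumed values.
    import subprocess, sys

    def step(cmd, label, cwd):
        # Stop at the first failing step, as the shell scripts do.
        if subprocess.call(cmd, cwd=cwd) != 0:
            sys.exit('%s failed.' % label)

    src = 'zlib-1.2.8'                 # hypothetical unpacked tarball
    prefix = '/opt/uvcdat/Externals'   # stands in for ${prefix}/Externals

    step(['./configure', '--prefix=' + prefix], 'zlib configuration', src)
    step(['make'], 'zlib make', src)
    step(['make', 'install'], 'zlib installation', src)
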
diff --git a/resources/uvcdat.icns b/resources/uvcdat.icns
deleted file mode 100644
index 3d1efc0aa54111774aafee3e215bf5348a602254..0000000000000000000000000000000000000000
Binary files a/resources/uvcdat.icns and /dev/null differ
diff --git a/resources/uvcdat.jpg b/resources/uvcdat.jpg
deleted file mode 100644
index 16835ba2c63dc69a4f985214044de61314878456..0000000000000000000000000000000000000000
Binary files a/resources/uvcdat.jpg and /dev/null differ
diff --git a/scripts/clean_script b/scripts/clean_script
deleted file mode 100755
index ebf3bfa963552da694483337178be6c6bfe0ffbe..0000000000000000000000000000000000000000
--- a/scripts/clean_script
+++ /dev/null
@@ -1,14 +0,0 @@
-#!/bin/sh
-if ( test "$1" = "all" ) then
-   (cd pysrc >/dev/null ; ./clean_script)
-   (cd exsrc >/dev/null; ./clean_script)
-   (cd logs >/dev/null ; /bin/rm *.LOG >/dev/null 2>&1)
-fi
-
-(cd libcdms;./clean_script >cdms.LOG 2>&1; /bin/rm -f cdms.LOG rebuild.py rebuild.LOG >/dev/null 2>&1)
-(cd esg; /bin/rm -fr build *.LOG rebuild.py *.log >/dev/null 2>&1)
-(cd Packages; /bin/rm -fr vcs/cdatwrap */build */rebuild.py */*.LOG */Test/Good */*.log >/dev/null 2>&1)
-(cd Packages/visus/src/pyvisus ; /bin/rm -rf build >/dev/null 2>&1)
-(cd contrib;/bin/rm -fr */build */*.o */*.a */*.pyc */Src/*.o */Src/*.a */rebuild.py */*.LOG */Test/Good */*.log >/dev/null 2>&1)
-/bin/rm -fr build *.pyc installation/build installation/cdat_info.* *~ rebuild.py  >/dev/null 2>&1
-find . -name 'config.cache' -print -exec rm {} \; 
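
The closing line of the deleted clean_script removes every config.cache
under the tree with find. The same cleanup expressed with os.walk, where
only the traversal mechanism is an assumption of this sketch:

    # Equivalent of: find . -name 'config.cache' -print -exec rm {} \;
    import os

    for root, dirs, files in os.walk('.'):
        if 'config.cache' in files:
            path = os.path.join(root, 'config.cache')
            print(path)        # mirror find's -print
            os.remove(path)
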
diff --git a/scripts/get_git_version.sh b/scripts/get_git_version.sh
deleted file mode 100755
index 7d27fa7fe1763c621534dfec861ccb3d07338e79..0000000000000000000000000000000000000000
--- a/scripts/get_git_version.sh
+++ /dev/null
@@ -1,7 +0,0 @@
-#!/usr/bin/env sh
-
-if [ "X"${CC} = "X" ] ; then
-    gcc show_git.c -o a.out ; ./a.out ; rm a.out
-else
-   ${CC} show_git.c -o a.out ; ./a.out ; rm a.out
-fi
diff --git a/scripts/git_hooks/commit-msg b/scripts/git_hooks/commit-msg
deleted file mode 100755
index 672bfaae2f01c8e54d705bbbf72629981ea55d11..0000000000000000000000000000000000000000
--- a/scripts/git_hooks/commit-msg
+++ /dev/null
@@ -1,3 +0,0 @@
-#!/usr/bin/env bash
-
-# placeholder for custom commit-msg hooks
diff --git a/scripts/git_hooks/pre-commit b/scripts/git_hooks/pre-commit
deleted file mode 100755
index e7b50ac4ae23934723219f5a99b2306561624211..0000000000000000000000000000000000000000
--- a/scripts/git_hooks/pre-commit
+++ /dev/null
@@ -1,13 +0,0 @@
-#!/usr/bin/env bash
-
-# Reject commits directly to 'master' to encourage use of topic branches.
-if test -z "$HOOKS_ALLOW_COMMIT_MASTER"; then
-  if git symbolic-ref HEAD | egrep -q '^refs/heads/master$'; then
-    echo 'Please do not commit directly to "master".  Create a topic instead:
-
- git checkout -b my-topic
- git commit
-'
-    exit 1
-  fi
-fi
diff --git a/scripts/git_hooks/pre-push b/scripts/git_hooks/pre-push
deleted file mode 100755
index 424f890f9a753338a401b6d170e37020a458b84a..0000000000000000000000000000000000000000
--- a/scripts/git_hooks/pre-push
+++ /dev/null
@@ -1,14 +0,0 @@
-#!/usr/bin/env bash
-
-# Reject pushes directly to 'master' to encourage use of topic branches.
-if test -z "$HOOKS_ALLOW_PUSH_MASTER"; then
-  while IFS=' ' read local_ref local_sha1 remote_ref remote_sha1; do
-    if test "x$remote_ref" = "xrefs/heads/master"; then
-      echo 'Please do not push directly to "master".  Push to a topic instead:
-
-  git push '"$1"' '"$local_ref"':my-topic
-'
-      exit 1
-    fi
-  done
-fi
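
Both deleted hooks gate on the symbolic ref of HEAD and can be bypassed
with an environment variable. A Python rendering of the pre-commit check,
assuming git is on PATH and HEAD is not detached (the logic mirrors the
hook; the implementation language does not):

    # master-branch guard, after the deleted pre-commit hook.
    import os, subprocess, sys

    if not os.environ.get('HOOKS_ALLOW_COMMIT_MASTER'):
        head = subprocess.check_output(
            ['git', 'symbolic-ref', 'HEAD']).strip().decode()
        if head == 'refs/heads/master':
            sys.stderr.write('Please do not commit directly to "master". '
                             'Create a topic instead:\n\n'
                             ' git checkout -b my-topic\n git commit\n')
            sys.exit(1)
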
diff --git a/scripts/git_hooks/prepare-commit-msg b/scripts/git_hooks/prepare-commit-msg
deleted file mode 100755
index 1571a7d203f0f24c55216a386f42cac81606797a..0000000000000000000000000000000000000000
--- a/scripts/git_hooks/prepare-commit-msg
+++ /dev/null
@@ -1,3 +0,0 @@
-#!/usr/bin/env bash
-
-# placeholder for custom prepare-commit-msg hooks
diff --git a/scripts/git_setup/.gitattributes b/scripts/git_setup/.gitattributes
deleted file mode 100644
index 3323f94b9bf89c214c31ec264378e6c1d7916353..0000000000000000000000000000000000000000
--- a/scripts/git_setup/.gitattributes
+++ /dev/null
@@ -1,9 +0,0 @@
-.git*            export-ignore
-
-# Exclude from source archives files specific to Git work tree.
-*                export-ignore
-
-config*          eol=lf         whitespace=indent-with-non-tab
-git-*            eol=lf         whitespace=indent-with-non-tab
-tips             eol=lf         whitespace=indent-with-non-tab
-setup-*          eol=lf         whitespace=indent-with-non-tab
diff --git a/scripts/git_setup/LICENSE b/scripts/git_setup/LICENSE
deleted file mode 100644
index d645695673349e3947e8e5ae42332d0ac3164cd7..0000000000000000000000000000000000000000
--- a/scripts/git_setup/LICENSE
+++ /dev/null
@@ -1,202 +0,0 @@
-
-                                 Apache License
-                           Version 2.0, January 2004
-                        http://www.apache.org/licenses/
-
-   TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
-
-   1. Definitions.
-
-      "License" shall mean the terms and conditions for use, reproduction,
-      and distribution as defined by Sections 1 through 9 of this document.
-
-      "Licensor" shall mean the copyright owner or entity authorized by
-      the copyright owner that is granting the License.
-
-      "Legal Entity" shall mean the union of the acting entity and all
-      other entities that control, are controlled by, or are under common
-      control with that entity. For the purposes of this definition,
-      "control" means (i) the power, direct or indirect, to cause the
-      direction or management of such entity, whether by contract or
-      otherwise, or (ii) ownership of fifty percent (50%) or more of the
-      outstanding shares, or (iii) beneficial ownership of such entity.
-
-      "You" (or "Your") shall mean an individual or Legal Entity
-      exercising permissions granted by this License.
-
-      "Source" form shall mean the preferred form for making modifications,
-      including but not limited to software source code, documentation
-      source, and configuration files.
-
-      "Object" form shall mean any form resulting from mechanical
-      transformation or translation of a Source form, including but
-      not limited to compiled object code, generated documentation,
-      and conversions to other media types.
-
-      "Work" shall mean the work of authorship, whether in Source or
-      Object form, made available under the License, as indicated by a
-      copyright notice that is included in or attached to the work
-      (an example is provided in the Appendix below).
-
-      "Derivative Works" shall mean any work, whether in Source or Object
-      form, that is based on (or derived from) the Work and for which the
-      editorial revisions, annotations, elaborations, or other modifications
-      represent, as a whole, an original work of authorship. For the purposes
-      of this License, Derivative Works shall not include works that remain
-      separable from, or merely link (or bind by name) to the interfaces of,
-      the Work and Derivative Works thereof.
-
-      "Contribution" shall mean any work of authorship, including
-      the original version of the Work and any modifications or additions
-      to that Work or Derivative Works thereof, that is intentionally
-      submitted to Licensor for inclusion in the Work by the copyright owner
-      or by an individual or Legal Entity authorized to submit on behalf of
-      the copyright owner. For the purposes of this definition, "submitted"
-      means any form of electronic, verbal, or written communication sent
-      to the Licensor or its representatives, including but not limited to
-      communication on electronic mailing lists, source code control systems,
-      and issue tracking systems that are managed by, or on behalf of, the
-      Licensor for the purpose of discussing and improving the Work, but
-      excluding communication that is conspicuously marked or otherwise
-      designated in writing by the copyright owner as "Not a Contribution."
-
-      "Contributor" shall mean Licensor and any individual or Legal Entity
-      on behalf of whom a Contribution has been received by Licensor and
-      subsequently incorporated within the Work.
-
-   2. Grant of Copyright License. Subject to the terms and conditions of
-      this License, each Contributor hereby grants to You a perpetual,
-      worldwide, non-exclusive, no-charge, royalty-free, irrevocable
-      copyright license to reproduce, prepare Derivative Works of,
-      publicly display, publicly perform, sublicense, and distribute the
-      Work and such Derivative Works in Source or Object form.
-
-   3. Grant of Patent License. Subject to the terms and conditions of
-      this License, each Contributor hereby grants to You a perpetual,
-      worldwide, non-exclusive, no-charge, royalty-free, irrevocable
-      (except as stated in this section) patent license to make, have made,
-      use, offer to sell, sell, import, and otherwise transfer the Work,
-      where such license applies only to those patent claims licensable
-      by such Contributor that are necessarily infringed by their
-      Contribution(s) alone or by combination of their Contribution(s)
-      with the Work to which such Contribution(s) was submitted. If You
-      institute patent litigation against any entity (including a
-      cross-claim or counterclaim in a lawsuit) alleging that the Work
-      or a Contribution incorporated within the Work constitutes direct
-      or contributory patent infringement, then any patent licenses
-      granted to You under this License for that Work shall terminate
-      as of the date such litigation is filed.
-
-   4. Redistribution. You may reproduce and distribute copies of the
-      Work or Derivative Works thereof in any medium, with or without
-      modifications, and in Source or Object form, provided that You
-      meet the following conditions:
-
-      (a) You must give any other recipients of the Work or
-          Derivative Works a copy of this License; and
-
-      (b) You must cause any modified files to carry prominent notices
-          stating that You changed the files; and
-
-      (c) You must retain, in the Source form of any Derivative Works
-          that You distribute, all copyright, patent, trademark, and
-          attribution notices from the Source form of the Work,
-          excluding those notices that do not pertain to any part of
-          the Derivative Works; and
-
-      (d) If the Work includes a "NOTICE" text file as part of its
-          distribution, then any Derivative Works that You distribute must
-          include a readable copy of the attribution notices contained
-          within such NOTICE file, excluding those notices that do not
-          pertain to any part of the Derivative Works, in at least one
-          of the following places: within a NOTICE text file distributed
-          as part of the Derivative Works; within the Source form or
-          documentation, if provided along with the Derivative Works; or,
-          within a display generated by the Derivative Works, if and
-          wherever such third-party notices normally appear. The contents
-          of the NOTICE file are for informational purposes only and
-          do not modify the License. You may add Your own attribution
-          notices within Derivative Works that You distribute, alongside
-          or as an addendum to the NOTICE text from the Work, provided
-          that such additional attribution notices cannot be construed
-          as modifying the License.
-
-      You may add Your own copyright statement to Your modifications and
-      may provide additional or different license terms and conditions
-      for use, reproduction, or distribution of Your modifications, or
-      for any such Derivative Works as a whole, provided Your use,
-      reproduction, and distribution of the Work otherwise complies with
-      the conditions stated in this License.
-
-   5. Submission of Contributions. Unless You explicitly state otherwise,
-      any Contribution intentionally submitted for inclusion in the Work
-      by You to the Licensor shall be under the terms and conditions of
-      this License, without any additional terms or conditions.
-      Notwithstanding the above, nothing herein shall supersede or modify
-      the terms of any separate license agreement you may have executed
-      with Licensor regarding such Contributions.
-
-   6. Trademarks. This License does not grant permission to use the trade
-      names, trademarks, service marks, or product names of the Licensor,
-      except as required for reasonable and customary use in describing the
-      origin of the Work and reproducing the content of the NOTICE file.
-
-   7. Disclaimer of Warranty. Unless required by applicable law or
-      agreed to in writing, Licensor provides the Work (and each
-      Contributor provides its Contributions) on an "AS IS" BASIS,
-      WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
-      implied, including, without limitation, any warranties or conditions
-      of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
-      PARTICULAR PURPOSE. You are solely responsible for determining the
-      appropriateness of using or redistributing the Work and assume any
-      risks associated with Your exercise of permissions under this License.
-
-   8. Limitation of Liability. In no event and under no legal theory,
-      whether in tort (including negligence), contract, or otherwise,
-      unless required by applicable law (such as deliberate and grossly
-      negligent acts) or agreed to in writing, shall any Contributor be
-      liable to You for damages, including any direct, indirect, special,
-      incidental, or consequential damages of any character arising as a
-      result of this License or out of the use or inability to use the
-      Work (including but not limited to damages for loss of goodwill,
-      work stoppage, computer failure or malfunction, or any and all
-      other commercial damages or losses), even if such Contributor
-      has been advised of the possibility of such damages.
-
-   9. Accepting Warranty or Additional Liability. While redistributing
-      the Work or Derivative Works thereof, You may choose to offer,
-      and charge a fee for, acceptance of support, warranty, indemnity,
-      or other liability obligations and/or rights consistent with this
-      License. However, in accepting such obligations, You may act only
-      on Your own behalf and on Your sole responsibility, not on behalf
-      of any other Contributor, and only if You agree to indemnify,
-      defend, and hold each Contributor harmless for any liability
-      incurred by, or claims asserted against, such Contributor by reason
-      of your accepting any such warranty or additional liability.
-
-   END OF TERMS AND CONDITIONS
-
-   APPENDIX: How to apply the Apache License to your work.
-
-      To apply the Apache License to your work, attach the following
-      boilerplate notice, with the fields enclosed by brackets "[]"
-      replaced with your own identifying information. (Don't include
-      the brackets!)  The text should be enclosed in the appropriate
-      comment syntax for the file format. We also recommend that a
-      file or class name and description of purpose be included on the
-      same "printed page" as the copyright notice for easier
-      identification within third-party archives.
-
-   Copyright [yyyy] [name of copyright owner]
-
-   Licensed under the Apache License, Version 2.0 (the "License");
-   you may not use this file except in compliance with the License.
-   You may obtain a copy of the License at
-
-       http://www.apache.org/licenses/LICENSE-2.0
-
-   Unless required by applicable law or agreed to in writing, software
-   distributed under the License is distributed on an "AS IS" BASIS,
-   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-   See the License for the specific language governing permissions and
-   limitations under the License.
diff --git a/scripts/git_setup/NOTICE b/scripts/git_setup/NOTICE
deleted file mode 100644
index 0d32c02eb6920286d1effdb02031aef830dbc875..0000000000000000000000000000000000000000
--- a/scripts/git_setup/NOTICE
+++ /dev/null
@@ -1,5 +0,0 @@
-Kitware Local Git Setup Scripts
-Copyright 2010-2012 Kitware, Inc.
-
-This product includes software developed at Kitware, Inc.
-(http://www.kitware.com/).
diff --git a/scripts/git_setup/README b/scripts/git_setup/README
deleted file mode 100644
index cf468fb68bc35a233e88591f60279b834302583d..0000000000000000000000000000000000000000
--- a/scripts/git_setup/README
+++ /dev/null
@@ -1,80 +0,0 @@
-Kitware Local Git Setup Scripts
-
-
-Introduction
-------------
-
-This is a collection of local Git development setup scripts meant for
-inclusion in project source trees to aid their development workflow.
-Project-specific information needed by the scripts may be configured
-in a "config" file added next to them in the project.
-
-
-Import
-------
-
-A project may import these scripts into their source tree by
-initializing a subtree merge.  Bring up a Git prompt and set the
-current working directory inside a clone of the target project.
-Fetch the "setup" branch from the GitSetup repository:
-
- $ git fetch ../GitSetup setup:setup
-
-Prepare to merge the branch but place the content in a subdirectory.
-Any prefix (with trailing '/') may be chosen so long as it is used
-consistently within a project through the rest of these instructions:
-
- $ git merge -s ours --no-commit setup
- $ git read-tree -u --prefix=Utilities/GitSetup/ setup
-
-Commit the merge with an informative message:
-
- $ git commit
- ------------------------------------------------------------------------
- Merge branch 'setup'
-
- Add Utilities/GitSetup/ directory using subtree merge from
- the general GitSetup repository "setup" branch.
- ------------------------------------------------------------------------
-
-
-Configuration
--------------
-
-Read the "Project configuration instructions" comment in each script.
-Add a "config" file next to the scripts with desired configuration
-(optionally copy and modify "config.sample").  For example, to
-configure the "setup-hooks" script:
-
- $ git config -f Utilities/GitSetup/config hooks.url "$url"
-
-where "$url" is the project repository publishing the "hooks" branch.
-When finished, add and commit the configuration file:
-
- $ git add Utilities/GitSetup/config
- $ git commit
-
-
-Update
-------
-
-A project may update these scripts from the GitSetup repository.
-Bring up a Git prompt and set the current working directory inside a
-clone of the target project.  Fetch the "setup" branch from the
-GitSetup repository:
-
- $ git fetch ../GitSetup setup:setup
-
-Merge the "setup" branch into the subtree:
-
- $ git merge -X subtree=Utilities/GitSetup setup
-
-where "Utilities/GitSetup" is the same prefix used during the import
-setup, but without a trailing '/'.
-
-
-License
--------
-
-Distributed under the Apache License 2.0.
-See LICENSE and NOTICE for details.
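In this repository the scripts live under "scripts/git_setup/" rather than the "Utilities/GitSetup/" prefix used in the examples above, so a hooks configuration pass (a sketch, using the URL from the "config" file deleted just below) becomes:

 $ git config -f scripts/git_setup/config hooks.url \
       https://github.com/UV-CDAT/uvcdat
 $ git add scripts/git_setup/config
 $ git commit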
diff --git a/scripts/git_setup/config b/scripts/git_setup/config
deleted file mode 100644
index 6fd06e8cc03ce6864d549fc378b74cb7fe114601..0000000000000000000000000000000000000000
--- a/scripts/git_setup/config
+++ /dev/null
@@ -1,2 +0,0 @@
-[hooks]
-	url = https://github.com/UV-CDAT/uvcdat
diff --git a/scripts/git_setup/config.sample b/scripts/git_setup/config.sample
deleted file mode 100644
index bba2382c3cad8e572aec867bf9991e9b2b71928e..0000000000000000000000000000000000000000
--- a/scripts/git_setup/config.sample
+++ /dev/null
@@ -1,22 +0,0 @@
-# Kitware Local Git Setup Scripts - Sample Project Configuration
-#
-# Copy to "config" and edit as necessary.
-
-[hooks]
-	url = http://public.kitware.com/GitSetup.git
-	#branch = hooks
-
-[ssh]
-	host = public.kitware.com
-	key = id_git_public
-	request-url = https://www.kitware.com/Admin/SendPassword.cgi
-
-[stage]
-	#url = git://public.kitware.com/stage/Project.git
-	#pushurl = git@public.kitware.com:stage/Project.git
-
-[gerrit]
-	#project = Project
-	site = http://review.source.kitware.com
-	# pushurl placeholder "$username" is literal
-	pushurl = $username@review.source.kitware.com:Project
diff --git a/scripts/git_setup/git-gerrit-push b/scripts/git_setup/git-gerrit-push
deleted file mode 100755
index 2471490c25ba59b85d2a4f8bc38360642fa613ba..0000000000000000000000000000000000000000
--- a/scripts/git_setup/git-gerrit-push
+++ /dev/null
@@ -1,73 +0,0 @@
-#!/usr/bin/env bash
-#=============================================================================
-# Copyright 2010-2012 Kitware, Inc.
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#=============================================================================
-
-USAGE="[<remote>] [--no-topic] [--dry-run] [--]"
-OPTIONS_SPEC=
-SUBDIRECTORY_OK=Yes
-. "$(git --exec-path)/git-sh-setup"
-
-#-----------------------------------------------------------------------------
-
-remote=''
-refspecs=''
-no_topic=''
-dry_run=''
-
-# Parse the command line options.
-while test $# != 0; do
-	case "$1" in
-		--no-topic) no_topic=1 ;;
-		--dry-run)  dry_run=--dry-run ;;
-		--) shift; break ;;
-		-*) usage ;;
-		*) test -z "$remote" || usage ; remote="$1" ;;
-	esac
-	shift
-done
-test $# = 0 || usage
-
-# Default remote.
-test -n "$remote" || remote="gerrit"
-
-if test -z "$no_topic"; then
-	# Identify and validate the topic branch name.
-	topic="$(git symbolic-ref HEAD | sed -e 's|^refs/heads/||')"
-	if test "$topic" = "master"; then
-		die 'Please name your topic:
-		git checkout -b descriptive-name'
-	fi
-	refspecs="HEAD:refs/for/master/$topic"
-fi
-
-# Exit early if we have nothing to push.
-if test -z "$refspecs"; then
-	echo "Nothing to push!"
-	exit 0
-fi
-
-# Fetch the current upstream master branch head.
-# This helps the computation of a minimal pack to push.
-echo "Fetching $remote master"
-fetch_out=$(git fetch "$remote" master 2>&1) || die "$fetch_out"
-
-# Push.  Save output and exit code.
-echo "Pushing to $remote"
-push_stdout=$(git push --porcelain $dry_run "$remote" $refspecs); push_exit=$?
-echo "$push_stdout"
-
-# Reproduce the push exit code.
-exit $push_exit
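Judging from the USAGE string and the option parsing above, once this script is on PATH as "git-gerrit-push" a typical session looks like (the topic name is taken from the current branch):

 $ git gerrit-push --dry-run   # preview: HEAD:refs/for/master/<topic> to "gerrit"
 $ git gerrit-push             # actually push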
diff --git a/scripts/git_setup/setup-gerrit b/scripts/git_setup/setup-gerrit
deleted file mode 100755
index 6d46e3ccf54cef8313e6b3bfe3c76abe6f17cd1d..0000000000000000000000000000000000000000
--- a/scripts/git_setup/setup-gerrit
+++ /dev/null
@@ -1,147 +0,0 @@
-#!/usr/bin/env bash
-#=============================================================================
-# Copyright 2010-2012 Kitware, Inc.
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#=============================================================================
-
-# Run this script to set up the local Git repository to push to
-# a Gerrit Code Review instance for this project.
-
-# Project configuration instructions:
-#
-# - Run a Gerrit Code Review server
-#
-# - Populate adjacent "config" file with:
-#    gerrit.site = Top Gerrit URL (not project-specific)
-#    gerrit.project = Name of project in Gerrit
-#    gerrit.pushurl = Review site push URL with "$username" placeholder
-#    gerrit.remote = Gerrit remote name, if not "gerrit"
-#    gerrit.url = Gerrit project URL, if not "$site/p/$project"
-#                 optionally with "$username" placeholder
-
-die() {
-	echo 1>&2 "$@" ; exit 1
-}
-
-# Make sure we are inside the repository.
-cd "${BASH_SOURCE%/*}" &&
-
-# Load the project configuration.
-site=$(git config -f config --get gerrit.site) &&
-project=$(git config -f config --get gerrit.project) &&
-remote=$(git config -f config --get gerrit.remote ||
-	 echo "gerrit") &&
-fetchurl_=$(git config -f config --get gerrit.url ||
-	    echo "$site/p/$project") &&
-pushurl_=$(git config -f config --get gerrit.pushurl ||
-	   git config -f config --get gerrit.url) ||
-die 'This project is not configured to use Gerrit.'
-
-# Get current gerrit push URL.
-pushurl=$(git config --get remote."$remote".pushurl ||
-	  git config --get remote."$remote".url || echo '') &&
-
-# Tell user about current configuration.
-if test -n "$pushurl"; then
-	echo 'Remote "'"$remote"'" is currently configured to push to
-
-  '"$pushurl"'
-' &&
-	read -ep 'Reconfigure Gerrit? [y/N]: ' ans &&
-	if [ "$ans" == "y" ] || [ "$ans" == "Y" ]; then
-		setup=1
-	else
-		setup=''
-	fi
-else
-	echo 'Remote "'"$remote"'" is not yet configured.
-
-'"$project"' changes must be pushed to our Gerrit Code Review site:
-
-  '"$site/p/$project"'
-
-Register a Gerrit account and select a username (used below).
-You will need an OpenID:
-
-  http://openid.net/get-an-openid/
-' &&
-	read -ep 'Configure Gerrit? [Y/n]: ' ans &&
-	if [ "$ans" == "n" ] || [ "$ans" == "N" ]; then
-		exit 0
-	else
-		setup=1
-	fi
-fi &&
-
-# Perform setup if necessary.
-if test -n "$setup"; then
-	echo 'Sign-in to Gerrit to get/set your username at
-
-  '"$site"'/#/settings
-
-Add your SSH public keys at
-
-  '"$site"'/#/settings/ssh-keys
-' &&
-	read -ep "Gerrit username? [$USER]: " gu &&
-	if test -z "$gu"; then
-		gu="$USER"
-	fi &&
-	fetchurl="${fetchurl_/\$username/$gu}" &&
-	if test -z "$pushurl"; then
-		git remote add "$remote" "$fetchurl"
-	else
-		git config remote."$remote".url "$fetchurl"
-	fi &&
-	pushurl="${pushurl_/\$username/$gu}" &&
-	if test "$pushurl" != "$fetchurl"; then
-		git config remote."$remote".pushurl "$pushurl"
-	fi &&
-	echo 'Remote "'"$remote"'" is now configured to push to
-
-  '"$pushurl"'
-'
-fi &&
-
-# Optionally test Gerrit access.
-if test -n "$pushurl"; then
-	read -ep 'Test access to Gerrit (SSH)? [y/N]: ' ans &&
-	if [ "$ans" == "y" ] || [ "$ans" == "Y" ]; then
-		echo -n 'Testing Gerrit access by SSH...'
-		if git ls-remote --heads "$pushurl" >/dev/null; then
-			echo 'passed.'
-		else
-			echo 'failed.' &&
-			die 'Could not access Gerrit.  Add your SSH public keys at
-
-  '"$site"'/#/settings/ssh-keys
-'
-		fi
-	fi
-fi &&
-
-# Set up GerritId hook.
-hook=$(git config --get hooks.GerritId || echo '') &&
-if test -z "$hook"; then
-	echo '
-Enabling GerritId hook to add a "Change-Id" footer to commit
-messages for interaction with Gerrit.  Run
-
-  git config hooks.GerritId false
-
-to disable this feature (but you will be on your own).' &&
-	git config hooks.GerritId true
-else
-	echo 'GerritId hook already configured to "'"$hook"'".'
-fi
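The gerrit.* keys read here correspond to the commented [gerrit] block in config.sample above; a minimal project configuration might be seeded with (a sketch, reusing the sample site and the literal $username placeholder):

 $ git config -f scripts/git_setup/config gerrit.site http://review.source.kitware.com
 $ git config -f scripts/git_setup/config gerrit.project Project
 $ git config -f scripts/git_setup/config gerrit.pushurl \
       '$username@review.source.kitware.com:Project'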
diff --git a/scripts/git_setup/setup-hooks b/scripts/git_setup/setup-hooks
deleted file mode 100755
index c07985ae5661658139ae7aff4353cfb374c35bc1..0000000000000000000000000000000000000000
--- a/scripts/git_setup/setup-hooks
+++ /dev/null
@@ -1,63 +0,0 @@
-#!/usr/bin/env bash
-#=============================================================================
-# Copyright 2010-2012 Kitware, Inc.
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#=============================================================================
-
-# Run this script to set up local Git hooks for this project.
-
-# Project configuration instructions:
-#
-# - Publish a "hooks" branch in the project repository such that
-#   clones will have "refs/remotes/origin/hooks".
-#
-# - Populate adjacent "config" file with:
-#    hooks.url    = Repository URL publishing "hooks" branch
-#    hooks.branch = Repository branch instead of "hooks"
-
-egrep-q() {
-	egrep "$@" >/dev/null 2>/dev/null
-}
-
-die() {
-	echo 1>&2 "$@" ; exit 1
-}
-
-# Make sure we are inside the repository.
-cd "${BASH_SOURCE%/*}" &&
-
-# Select a hooks branch.
-if url=$(git config --get hooks.url); then
-	# Fetch hooks from locally configured repository.
-	branch=$(git config hooks.branch || echo hooks)
-elif git for-each-ref refs/remotes/origin/hooks 2>/dev/null |
-     egrep-q 'refs/remotes/origin/hooks$'; then
-	# Use hooks cloned from origin.
-	url=.. && branch=remotes/origin/hooks
-elif url=$(git config -f config --get hooks.url); then
-	# Fetch hooks from project-configured repository.
-	branch=$(git config -f config hooks.branch || echo hooks)
-else
-	die 'This project is not configured to install local hooks.'
-fi &&
-
-# Populate ".git/hooks".
-echo 'Setting up git hooks...' &&
-git_dir=$(git rev-parse --git-dir) &&
-cd "$git_dir/hooks" &&
-if ! test -e .git; then
-	git init -q || die 'Could not run git init for hooks.'
-fi &&
-git fetch -q "$url" "$branch" &&
-git reset -q --hard FETCH_HEAD || die 'Failed to install hooks'
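In effect this script turns ".git/hooks" into a small repository tracking the project's "hooks" branch. The manual equivalent of its final block (same commands, minus the error handling, with $url and $branch resolved from hooks.url/hooks.branch as above) is:

 $ cd "$(git rev-parse --git-dir)/hooks"
 $ git init -q
 $ git fetch -q "$url" "$branch"
 $ git reset -q --hard FETCH_HEAD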
diff --git a/scripts/git_setup/setup-ssh b/scripts/git_setup/setup-ssh
deleted file mode 100755
index 8920a5bd338b046f9b9822a874df7c624da2f237..0000000000000000000000000000000000000000
--- a/scripts/git_setup/setup-ssh
+++ /dev/null
@@ -1,111 +0,0 @@
-#!/usr/bin/env bash
-#=============================================================================
-# Copyright 2010-2012 Kitware, Inc.
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#=============================================================================
-
-# Run this script to set up ssh push access to the repository host.
-
-# Project configuration instructions:
-#
-# - Populate adjacent "config" file with:
-#    ssh.host = Repository host name
-#    ssh.user = Username on host, if not "git"
-#    ssh.key = Local ssh key name
-#    ssh.request-url = Web page URL to request ssh access
-
-egrep-q() {
-	egrep "$@" >/dev/null 2>/dev/null
-}
-
-die() {
-	echo 1>&2 "$@" ; exit 1
-}
-
-# Make sure we are inside the repository.
-cd "${BASH_SOURCE%/*}" &&
-
-# Load the project configuration.
-host=$(git config -f config --get ssh.host) &&
-user=$(git config -f config --get ssh.user || echo git) &&
-key=$(git config -f config --get ssh.key) &&
-request_url=$(git config -f config --get ssh.request-url) ||
-die 'This project is not configured for ssh push access.'
-
-# Check for existing configuration.
-if test -r ~/.ssh/config &&
-   egrep-q 'Host[= ]'"${host//\./\\.}" ~/.ssh/config; then
-	echo 'Host "'"$host"'" is already in ~/.ssh/config' &&
-	setup= &&
-	question='Test'
-else
-	echo 'Host "'"$host"'" not found in ~/.ssh/config' &&
-	setup=1 &&
-	question='Setup and test'
-fi &&
-
-# Ask the user whether to make changes.
-echo '' &&
-read -ep "${question} push access by ssh to $user@$host? [y/N]: " access &&
-if test "$access" != "y" -a "$access" != "Y"; then
-	exit 0
-fi &&
-
-# Setup host configuration if necessary.
-if test -n "$setup"; then
-	if ! test -d ~/.ssh; then
-		mkdir -p ~/.ssh &&
-		chmod 700 ~/.ssh
-	fi &&
-	if ! test -f ~/.ssh/config; then
-		touch ~/.ssh/config &&
-		chmod 600 ~/.ssh/config
-	fi &&
-	ssh_config='Host='"$host"'
-  IdentityFile ~/.ssh/'"$key" &&
-	echo "Adding to ~/.ssh/config:
-
-$ssh_config
-" &&
-	echo "$ssh_config" >> ~/.ssh/config &&
-	if ! test -e ~/.ssh/"$key"; then
-		if test -f ~/.ssh/id_rsa; then
-			# Take care of the common case.
-			ln -s id_rsa ~/.ssh/"$key"
-			echo '
-Assuming ~/.ssh/id_rsa is the private key corresponding to the public key for
-
-  '"$user@$host"'
-
-If this is incorrect place private key at "~/.ssh/'"$key"'".'
-		else
-			echo '
-Place the private key corresponding to the public key registered for
-
-  '"$user@$host"'
-
-at "~/.ssh/'"$key"'".'
-		fi
-		read -e -n 1 -p 'Press any key to continue...'
-	fi
-fi || exit 1
-
-# Test access configuration.
-echo 'Testing ssh push access to "'"$user@$host"'"...' &&
-if ! ssh "$user@$host" info; then
-	die 'No ssh push access to "'"$user@$host"'".  You may need to request access at
-
-  '"$request_url"'
-'
-fi
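Using the sample values from config.sample above (ssh.host public.kitware.com, ssh.key id_git_public), the stanza this script appends to ~/.ssh/config reads:

 Host=public.kitware.com
   IdentityFile ~/.ssh/id_git_public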
diff --git a/scripts/git_setup/setup-stage b/scripts/git_setup/setup-stage
deleted file mode 100755
index ce6ec457487ed486acf799a783fe9a161fad5111..0000000000000000000000000000000000000000
--- a/scripts/git_setup/setup-stage
+++ /dev/null
@@ -1,82 +0,0 @@
-#!/usr/bin/env bash
-#=============================================================================
-# Copyright 2010-2012 Kitware, Inc.
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#=============================================================================
-
-# Run this script to set up the topic stage for pushing changes.
-
-# Project configuration instructions:
-#
-# - Run a Topic Stage repository next to the main project repository.
-#
-# - Populate adjacent "config" file with:
-#    stage.url = Topic Stage repository URL
-#    stage.pushurl = Topic Stage push URL if not "$url"
-
-egrep-q() {
-	egrep "$@" >/dev/null 2>/dev/null
-}
-
-die() {
-	echo 1>&2 "$@" ; exit 1
-}
-
-# Make sure we are inside the repository.
-cd "${BASH_SOURCE%/*}" &&
-
-# Load the project configuration.
-fetchurl_=$(git config -f config --get stage.url) &&
-pushurl_=$(git config -f config --get stage.pushurl || echo "$fetchurl_") &&
-remote=$(git config -f config --get stage.remote || echo 'stage') ||
-die 'This project is not configured to use a topic stage.'
-
-# Get current stage push URL.
-pushurl=$(git config --get remote."$remote".pushurl ||
-	  git config --get remote."$remote".url || echo '') &&
-
-# Tell user about current configuration.
-if test -n "$pushurl"; then
-	echo 'Remote "'"$remote"'" is currently configured to push to
-
-  '"$pushurl"'
-' &&
-	read -ep 'Reconfigure Topic Stage? [y/N]: ' ans &&
-	if [ "$ans" == "y" ] || [ "$ans" == "Y" ]; then
-		setup=1
-	else
-		setup=''
-	fi
-else
-	setup=1
-fi
-
-# Perform setup if necessary.
-if test -n "$setup"; then
-	echo 'Setting up the topic stage...' &&
-	fetchurl="${fetchurl_}" &&
-	if test -z "$pushurl"; then
-		git remote add "$remote" "$fetchurl"
-	else
-		git config remote."$remote".url "$fetchurl"
-	fi &&
-	pushurl="${pushurl_}" &&
-	if test "$pushurl" != "$fetchurl"; then
-		git config remote."$remote".pushurl "$pushurl"
-	fi &&
-	echo 'Remote "'"$remote"'" is now configured to push to
-
-  '"$pushurl"'
-'
-fi || die 'Could not configure the topic stage remote.'
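The stage.* keys follow the same per-project "config" pattern; a sketch seeding them from the commented sample URLs in config.sample:

 $ git config -f scripts/git_setup/config stage.url \
       git://public.kitware.com/stage/Project.git
 $ git config -f scripts/git_setup/config stage.pushurl \
       git@public.kitware.com:stage/Project.git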
diff --git a/scripts/git_setup/setup-user b/scripts/git_setup/setup-user
deleted file mode 100755
index 1af439c45e4c1862da78a947ddf74db6e167e42a..0000000000000000000000000000000000000000
--- a/scripts/git_setup/setup-user
+++ /dev/null
@@ -1,39 +0,0 @@
-#!/usr/bin/env bash
-#=============================================================================
-# Copyright 2010-2012 Kitware, Inc.
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#=============================================================================
-
-# Run this script to configure Git user info in this repository.
-
-# Project configuration instructions: NONE
-
-for (( ; ; )); do
-	user_name=$(git config user.name || echo '') &&
-	user_email=$(git config user.email || echo '') &&
-	if test -n "$user_name" -a -n "$user_email"; then
-		echo 'Your commits will record as Author:
-
-  '"$user_name <$user_email>"'
-' &&
-		read -ep 'Is the author name and email address above correct? [Y/n] ' correct &&
-		if test "$correct" != "n" -a "$correct" != "N"; then
-			break
-		fi
-	fi &&
-	read -ep 'Enter your full name e.g. "John Doe": ' name &&
-	read -ep 'Enter your email address e.g. "john@gmail.com": ' email &&
-	git config user.name "$name" &&
-	git config user.email "$email"
-done
diff --git a/scripts/git_setup/setup_aliases.sh b/scripts/git_setup/setup_aliases.sh
deleted file mode 100755
index 9771708161ecb5855cc9d6a5dc8dbaa69cc25b5b..0000000000000000000000000000000000000000
--- a/scripts/git_setup/setup_aliases.sh
+++ /dev/null
@@ -1,8 +0,0 @@
-#!/usr/bin/env bash
-
-echo "Setting up useful Git aliases..." &&
-
-# General aliases that could be global
-git config alias.prepush 'log --graph --stat origin/master..' &&
-
-true
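Once the alias is set, reviewing what a push would publish is one command:

 $ git prepush      # expands to: git log --graph --stat origin/master..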
diff --git a/scripts/git_setup/tips b/scripts/git_setup/tips
deleted file mode 100755
index 784e1ed890d3facb2bf9e983a19bbd2abdd90ff1..0000000000000000000000000000000000000000
--- a/scripts/git_setup/tips
+++ /dev/null
@@ -1,55 +0,0 @@
-#!/usr/bin/env bash
-#=============================================================================
-# Copyright 2010-2012 Kitware, Inc.
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#=============================================================================
-
-# This script makes optional suggestions for working with Git.
-
-# Project configuration instructions: NONE
-
-egrep-q() {
-	egrep "$@" >/dev/null 2>/dev/null
-}
-
-# Suggest color configuration.
-if test -z "$(git config --get color.ui)"; then
-	echo '
-One may enable color output from Git commands with
-
-  git config --global color.ui auto
-'
-fi
-
-# Suggest bash completion.
-if ! bash -i -c 'echo $PS1' | egrep-q '__git_ps1'; then
-	echo '
-A dynamic, informative Git shell prompt can be obtained by sourcing
-the git bash-completion script in your "~/.bashrc".  Set the PS1
-environmental variable as suggested in the comments at the top of the
-bash-completion script.  You may need to install the bash-completion
-package from your distribution to obtain it.
-'
-fi
-
-# Suggest merge tool.
-if test -z "$(git config --get merge.tool)"; then
-	echo '
-One may configure Git to load a merge tool with
-
-  git config merge.tool <toolname>
-
-See "git help mergetool" for more information.
-'
-fi
diff --git a/scripts/last_update_time.py b/scripts/last_update_time.py
deleted file mode 100644
index a0bd0aed4cca3aea8bcba7f50532c2477c10f10a..0000000000000000000000000000000000000000
--- a/scripts/last_update_time.py
+++ /dev/null
@@ -1,12 +0,0 @@
-import os
-
-ln=os.popen('p4 changes -m 1 //depot/main/...').readlines()
-
-
-for l in ln:
-    sp=l.split()
-    date='_'.join(sp[3].split('/'))
-    date=sp[3]
-    print 'Last change on:',date, 'for more info run: p4 changes -m 1 //depot/main/...'
-    
-    
diff --git a/scripts/nightly.sh b/scripts/nightly.sh
deleted file mode 100755
index 29e23fa35700d247dd559a3df50c4e86187f11b9..0000000000000000000000000000000000000000
--- a/scripts/nightly.sh
+++ /dev/null
@@ -1,35 +0,0 @@
-#!/usr/bin/env bash
-
-
-## script to build automatically (UV-)CDAT
-
-INSTALL_PATH=$1
-GIT_BRANCH=$2
-GIT_PATH=$3
-QMAKE_EXE=$4
-XTRA_ARGS=$5
-
-here=`pwd`
-
-: ${INSTALL_PATH:="/lgm/uvcdat/nightly"}
-: ${GIT_BRANCH:="next"}
-: ${GIT_PATH:="/git/uv-cdat"}
-: ${QMAKE_EXE:="/usr/bin/qmake"}
-#: ${XTRA_ARGS:="-DCDAT_USE_LIBXML2=ON -DCDAT_USE_SYSTEM_PNG=ON"}
-
-echo "XTRA_ARGS:"${XTRA_ARGS}
-
-cd ${GIT_PATH} ; \
-git checkout ${GIT_BRANCH} ; \
-git pull ; \
-/bin/rm -rf ${INSTALL_PATH} ; \
-cd ${here} ; \
-rm -rf build_nightly ;\ 
-mkdir build_nightly ;\
-cd build_nightly ;\
-cmake -DCMAKE_INSTALL_PREFIX=${INSTALL_PATH} -DQT_QMAKE_EXECUTABLE=${QMAKE_EXE} ${XTRA_ARGS} ${GIT_PATH} ; \
-cmake -DCMAKE_INSTALL_PREFIX=${INSTALL_PATH} -DQT_QMAKE_EXECUTABLE=${QMAKE_EXE} ${XTRA_ARGS} ${GIT_PATH} ; \
-pwd ; \
-make -j16 ; \
-
-
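All arguments to this script are optional: the first four fall back to the defaults above and XTRA_ARGS defaults to empty, so these two invocations are equivalent:

 $ scripts/nightly.sh
 $ scripts/nightly.sh /lgm/uvcdat/nightly next /git/uv-cdat /usr/bin/qmake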
diff --git a/scripts/setup_for_development.sh b/scripts/setup_for_development.sh
deleted file mode 100755
index 190f39a816b13e4490bba617f32893631c29bd84..0000000000000000000000000000000000000000
--- a/scripts/setup_for_development.sh
+++ /dev/null
@@ -1,19 +0,0 @@
-#!/usr/bin/env bash
-
-cd "${BASH_SOURCE%/*}/.." &&
-scripts/git_setup/setup-user && echo &&
-scripts/git_setup/setup-hooks && echo &&
-scripts/git_setup/setup_aliases.sh && echo &&
-scripts/git_setup/tips
-
-# Rebase master by default
-git config branch.master.rebase true
-
-# Configure remote push URL.
-if url="$(git config --get remote.origin.url)" &&
-   echo "$url" | egrep -q '^(https?|git)://github.com/UV-CDAT/uvcdat(\.git)?$' &&
-   ! pushurl="$(git config --get remote.origin.pushurl)"; then
-  pushurl='git@github.com:UV-CDAT/uvcdat.git'
-  echo 'Setting origin pushurl to '"$pushurl"
-  git config remote.origin.pushurl "$pushurl"
-fi
diff --git a/scripts/tarballit.sh b/scripts/tarballit.sh
deleted file mode 100755
index 1217260c6e26b080478f87013e6572c4c1a1880f..0000000000000000000000000000000000000000
--- a/scripts/tarballit.sh
+++ /dev/null
@@ -1,7 +0,0 @@
-#!/usr/bin/env sh
-
-svn export http://www-pcmdi.llnl.gov/svn/repository/cdat/trunk $1
-tar czvf $1-everything.tar.gz  $1
-tar czvf $1-cdat.tar.gz --exclude $1/pysrc* --exclude $1/exsrc*  $1
-tar czvf $1-pysrc.tar.gz $1/pysrc
-tar czvf $1-exsrc.tar.gz $1/exsrc
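The single argument is both the export directory and the tarball prefix; an illustrative run (the version string is hypothetical):

 $ scripts/tarballit.sh cdat-x.y
   # -> cdat-x.y-everything.tar.gz, cdat-x.y-cdat.tar.gz,
   #    cdat-x.y-pysrc.tar.gz, cdat-x.y-exsrc.tar.gz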
diff --git a/testing/CMakeLists.txt b/testing/CMakeLists.txt
index 2294881725e3a282cdab65242b6fe1d8ea31142f..f890a8e22fed59b69449d7a8433015fe66202e8d 100644
--- a/testing/CMakeLists.txt
+++ b/testing/CMakeLists.txt
@@ -1,5 +1,5 @@
-# Disabling GUI tests as they don't work
-#add_subdirectory(uvcdat)
+set(PYTHON_EXECUTABLE python)
+set(CDAT_DOWNLOAD_SAMPLE_DATA ON)
 
 # Helper macro that sets the environment correctly
 macro (cdat_add_test name)
@@ -12,8 +12,10 @@ macro (cdat_add_test name)
     endif()
   endif()
 
-  add_test(${name} "${CMAKE_INSTALL_PREFIX}/bin/runtest"
-           ${ARGS})
+  add_test(${name} ${cdat_CMAKE_BINARY_DIR}/runtest  ${ARGS})
+  if ( NOT (${name} STREQUAL download_sample_data ))
+      set_tests_properties(${name} PROPERTIES DEPENDS download_sample_data)
+  endif()
 
   if(DEFINED ENV{UVCDAT_ANONYMOUS_LOG})
     set_tests_properties (${name}
@@ -25,48 +27,20 @@ macro (cdat_add_test name)
     )
   endif()
 endmacro()
-if (CDAT_BUILD_GRAPHICS)
-  add_subdirectory(regrid)
-  add_subdirectory(vcs)
-  add_subdirectory(vcsaddons)
-  add_subdirectory(dv3d)
-endif()
+
+#separate_arguments(DOWNLOAD_ARGS)
+# Make sure the sample data is downloaded before the tests that need it
+cdat_add_test(download_sample_data
+    vcs_download_sample_data
+    )
+
+add_subdirectory(regrid)
+add_subdirectory(vcs)
+add_subdirectory(vcsaddons)
+add_subdirectory(dv3d)
 add_subdirectory(cdutil)
 add_subdirectory(Thermo)
 add_subdirectory(unidata)
 add_subdirectory(cdms2)
 add_subdirectory(xmgrace)
-if (CDAT_BUILD_OCGIS)
-  add_subdirectory(ocgis)
-endif()
-if (CDAT_BUILD_UVCMETRICSPKG)
-  add_subdirectory(metrics)
-endif()
-
-# Disabling ParaView tests
-#if (CDAT_BUILD_PARAVIEW)
-#  add_subdirectory(paraview)
-#endif()
-
-# Test RPY2
-if (CDAT_BUILD_RPY2)
-  add_subdirectory(rpy2)
-endif()
-
-# Test Matplotlib
-if (CDAT_BUILD_MATPLOTLIB)
-  add_subdirectory(matplotlib)
-endif()
-
-# PCMDI Tools
-if (CDAT_BUILD_PCMDI)
-    add_subdirectory(pcmdi)
-endif()
-
-# CMake module tests:
-# Test that out-of-source build detection is working:
-add_test(cmake_checkBuildOutOfSource
-  "${CMAKE_COMMAND}"
-    -DTEST_check_build_out_of_source=ON
-    -P "${cdat_SOURCE_DIR}/CMake/cmake_modules/CheckBuildOutOfSource.cmake"
-)
+#add_subdirectory(pcmdi)
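With the macro change above, every test except download_sample_data carries a DEPENDS property on it, so CTest orders the sample-data download ahead of the dependent tests even in parallel runs, e.g.:

 $ ctest -j8    # download_sample_data completes before tests that depend on it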
diff --git a/testing/Thermo/CMakeLists.txt b/testing/Thermo/CMakeLists.txt
index bae57cea12d7739c0657a9224e434b0a7837bffd..c855dc95349ff6aa439e0457f8bfd91742d2d0e0 100644
--- a/testing/Thermo/CMakeLists.txt
+++ b/testing/Thermo/CMakeLists.txt
@@ -1,5 +1,5 @@
-add_test(flake8_Thermo
-  "${FLAKE8_EXECUTABLE}" "${cdat_SOURCE_DIR}/Packages/Thermo/Lib/"
+cdat_add_test(flake8_Thermo
+  flake8 "${cdat_SOURCE_DIR}/Packages/Thermo/Lib/"
   --show-source # Show context for detected errors
   --statistics  # Show summary of errors at end of output
   --max-line-length=120 # Reasonable line length
diff --git a/testing/cdutil/test_vert.py b/testing/cdutil/test_vert.py
index 591ac6130661281178a022223fea534e9d8f2f99..54f9c625a090d75b10bbae9fb69313eb1a56f435 100644
--- a/testing/cdutil/test_vert.py
+++ b/testing/cdutil/test_vert.py
@@ -2,22 +2,24 @@
 # Adapted for numpy/ma/cdms2 by convertcdms.py
 
 
-import cdutil,cdat_info
+import cdutil
+import cdat_info
+import numpy
 
 import cdms2
 import os
 bg = 0
 
-f = cdms2.open(os.path.join(cdat_info.get_sampledata_path(),'vertical.nc'))
-Ps=f('PS')
-U=f('U')
-B=f('hybm')
-A=f('hyam')
-Po=f('variable_2')
-P=cdutil.reconstructPressureFromHybrid(Ps,A,B,Po)
+f = cdms2.open(os.path.join(cdat_info.get_sampledata_path(), 'vertical.nc'))
+Ps = f('PS')
+U = f('U')
+B = f('hybm')
+A = f('hyam')
+Po = f('variable_2')
+P = cdutil.reconstructPressureFromHybrid(Ps, A, B, Po)
 
-U2=cdutil.logLinearInterpolation(U,P)
-
-#x=vcs.init()
-#x.plot(U2,bg=bg)
-#raw_input()
+U2 = cdutil.logLinearInterpolation(U, P)
+U2b = cdutil.logLinearInterpolation(U, P, axis='0')
+assert(numpy.ma.allclose(U2, U2b))
+U2b = cdutil.logLinearInterpolation(U, P, axis='(lev)')
+assert(numpy.ma.allclose(U2, U2b))
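The extended test takes no arguments; it only assumes cdat_info can locate vertical.nc in the sample-data path, so it can be run directly:

 $ python testing/cdutil/test_vert.py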
diff --git a/testing/dv3d/TestManager.py b/testing/dv3d/TestManager.py
index 51ed57183a7e28edd7c9265a39b9979e9c5e7a04..5b0aa208e4e02191dae1b1f82be38d11c94ae7bb 100644
--- a/testing/dv3d/TestManager.py
+++ b/testing/dv3d/TestManager.py
@@ -10,7 +10,7 @@ import vcs, os, sys, shutil, collections, subprocess
 TestingDir=os.path.dirname(__file__)
 pth = os.path.join(TestingDir,"..")
 sys.path.append(pth)
-import checkimage
+import testing.regression as regression
 
 DefaultSampleFile = "geos5-sample.nc"
 DefaultSampleVar = "uwnd"
@@ -106,9 +106,8 @@ class vcsTest:
 
         plot_kwargs = { 'cdmsfile': self.file.id, 'window_size': (900,600) }
         self.canvas.setantialiasing(False)
-        self.canvas.plot( *plot_args, **plot_kwargs )
-        self.plot = self.canvas.backend.plotApps[ self.gm ]
-#        self.applyActions()
+        display = self.canvas.plot( *plot_args, **plot_kwargs )
+        self.plot = self.canvas.backend.plotApps[ vcs.elements[display.g_type][display.g_name] ]
 
     def applyActions(self):
         for action in self.actions:
@@ -128,8 +127,8 @@ class vcsTest:
         test_image = '.'.join( [ self.name, 'test', 'png' ] )
         self.canvas.png( test_image, width = 900, height = 600 )
 
-        ret = checkimage.check_result_image( test_image, self.image_name,\
-                checkimage.defaultThreshold+3. )
+        ret = regression.check_result_image( test_image, self.image_name,\
+                regression.defaultThreshold+3. )
 
         if  interactive:
             print "Type <Enter> to continue and update ref image ( type 'n' to skip update )."
diff --git a/testing/metrics/diags_test.py b/testing/metrics/diags_test.py
index ff71c8d66a87bae57008225b80066ad2b60660bc..2d4131cb80972fdaaa09492cfd859f9c3c55c771 100755
--- a/testing/metrics/diags_test.py
+++ b/testing/metrics/diags_test.py
@@ -12,7 +12,7 @@ import sys, os, shutil, tempfile, subprocess
 import cdms2, numpy
 pth = os.path.join(os.path.dirname(__file__),"..")
 sys.path.append(pth)
-import checkimage
+import testing.regression as regression
 import argparse, pdb
 
 class DiagTest(object):
@@ -95,7 +95,7 @@ class DiagTest(object):
     def execute(self, test_str, imagefilename, imagethreshold, ncfiles, rtol, atol):
         print test_str
         if imagethreshold is None:  # user didn't specify a value
-     	    imagethreshold = checkimage.defaultThreshold
+            imagethreshold = regression.defaultThreshold
         # Silence annoying messages about how to set the NetCDF file type.  Anything will do.
         cdms2.setNetcdfShuffleFlag(0)
         cdms2.setNetcdfDeflateFlag(0)
@@ -118,7 +118,7 @@ class DiagTest(object):
             imagebaselinefname = os.path.join( self.baselinepath, imagefilename )
             #pdb.set_trace()
             print "OK THRESHOLD IS:",imagethreshold
-            graphics_result = checkimage.check_result_image( imagefname, imagebaselinefname, imagethreshold )
+            graphics_result = regression.check_result_image( imagefname, imagebaselinefname, imagethreshold )
             print "Graphics file", imagefname, "match difference:", graphics_result
             
             #initialize to successful graphics check
diff --git a/testing/metrics/diagtest01.py b/testing/metrics/diagtest01.py
index b8cf415f0a89644a5d34ff0df2b89251f1efc36c..09197a1e3a9e370e23620fea8fb8129bf370af89 100755
--- a/testing/metrics/diagtest01.py
+++ b/testing/metrics/diagtest01.py
@@ -58,7 +58,7 @@ varid = 'T'
 vard = package.all_variables( filetable1, filetable2, sname )
 var = vard[varid]
 
-plot = sclass( [filetable1], [filetable2], varid, seasonid )
+plot = sclass( [filetable1], [filetable2], varid, seasonid, plotparms = { 'model':{}, 'obs':{}, 'diff':{} } )
 res = plot.compute()
 if res is not None:
     if res.__class__.__name__ is 'uvc_composite_plotspec':
diff --git a/testing/regrid/testDistSrc.py b/testing/regrid/testDistSrc.py
index 183efd4982f501eadf6f3dc0614f8722ab8dbdab..9da231af57987a3aba2a4cfa880670550c3ab6de 100644
--- a/testing/regrid/testDistSrc.py
+++ b/testing/regrid/testDistSrc.py
@@ -14,7 +14,7 @@ import regrid2
 import unittest
 import ESMP
 from regrid2 import esmf
-import scipy.io.netcdf
+#import scipy.io.netcdf
 from regrid2 import ESMFRegrid
 import sys
 HAS_MPI = False
@@ -75,7 +75,7 @@ class Test(unittest.TestCase):
 
     def Xtest0_ESMP(self):
 
-        import scipy.io.netcdf
+        #import scipy.io.netcdf
 
         #
         # 1. input
@@ -85,7 +85,8 @@ class Test(unittest.TestCase):
 
         inFile = cdat_info.get_sampledata_path() + \
             '/so_Omon_ACCESS1-0_historical_r1i1p1_185001-185412_2timesteps.nc'
-        srcF = scipy.io.netcdf.netcdf_file(inFile)
+        #srcF = scipy.io.netcdf.netcdf_file(inFile)
+        srcF = cdms2.open(inFile)
         #so = srcF.variables['so'][0, 0,...]
         missing_value = 1.e20
         srcGrd = [srcF.variables['lat'][:], srcF.variables['lon'][:]]
diff --git a/testing/regrid/testEsmfRegridPeriodictyRegional.py b/testing/regrid/testEsmfRegridPeriodictyRegional.py
index 2b92795255d6675dd6296b18d036c882935eb798..53a1a706b1620c4df58c68b4db26cf403001333f 100644
--- a/testing/regrid/testEsmfRegridPeriodictyRegional.py
+++ b/testing/regrid/testEsmfRegridPeriodictyRegional.py
@@ -1,9 +1,4 @@
-import vcs,cdms2
-import os,sys
-import EzTemplate
-pth = os.path.join(os.path.dirname(__file__),"..")
-sys.path.append(pth)
-import checkimage
+import os, sys, vcs, cdms2, EzTemplate, testing.regression as regression
 
 data = sys.argv[1]
 png = sys.argv[2]
@@ -38,11 +33,7 @@ s_esmf_lin.id = "ESMF Linear"
 s_esmf_con = s.regrid(grid_dest,regridTool="esmf",regridMethod="conservative")
 s_esmf_lin.id = "ESMF Conservative"
 
-x=vcs.init()
-x.setantialiasing(0)
-x.drawlogooff()
-x.setbgoutputdimensions(1200,1091,units="pixels")
-
+x=regression.init()
 t=x.createtemplate()
 t.blank()
 t.data.priority=1
@@ -60,7 +51,5 @@ x.plot(s,M.get(),gm,bg=1)
 x.plot(s_regrid2,M.get(),gm,bg=1)
 x.plot(s_esmf_lin,M.get(),gm,bg=1)
 x.plot(s_esmf_con,M.get(),gm,bg=1)
-x.png("esmf_issue_1125")
 
-ret = checkimage.check_result_image("esmf_issue_1125.png",png,checkimage.defaultThreshold)
-sys.exit(ret)
+ret = regression.run(x, "esmf_issue_1125.png", png)
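Tests ported to testing.regression keep their command-line contract (data file first, baseline image second); an illustrative run, with placeholder paths:

 $ python testing/regrid/testEsmfRegridPeriodictyRegional.py \
       /path/to/data.nc /path/to/baselines/esmf_issue_1125.png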
diff --git a/testing/vcs/CMakeLists.txt b/testing/vcs/CMakeLists.txt
index a5faf6e242845f3ea9e8bb6e105668cb877358ea..88f5b9c6005aa3199f2cd0e7c8bd9ca658e30466 100644
--- a/testing/vcs/CMakeLists.txt
+++ b/testing/vcs/CMakeLists.txt
@@ -1,21 +1,39 @@
 set(BASELINE_DIR "${UVCDAT_GIT_TESTDATA_DIR}/baselines/vcs")
 
-add_test(flake8_vcs
-  "${FLAKE8_EXECUTABLE}" "${cdat_SOURCE_DIR}/Packages/vcs/vcs/"
+cdat_add_test(flake8_vcs
+  flake8 "${cdat_SOURCE_DIR}/Packages/vcs/vcs/"
   --show-source # Show context for detected errors
   --statistics  # Show summary of errors at end of output
   --max-line-length=120 # Reasonable line length
+  --ignore=F999,E121,E123,E126,E226,E24,E704 # recent versions show zillions of errors if objects come from an import * line
 )
 
 cdat_add_test(test_vcs_bad_png_path
   "${PYTHON_EXECUTABLE}"
   ${cdat_SOURCE_DIR}/testing/vcs/test_vcs_bad_png_path.py
 )
-cdat_add_test(test_vcs_boxfill_polar
+
+foreach(projection polar mollweide lambert orthographic mercator polyconic robinson)
+  cdat_add_test(test_vcs_boxfill_${projection}
+    "${PYTHON_EXECUTABLE}"
+    ${cdat_SOURCE_DIR}/testing/vcs/test_vcs_boxfill_projection.py
+    "${BASELINE_DIR}/test_vcs_boxfill_${projection}.png"
+    ${projection}
+    )
+endforeach()
+
+foreach(lat_0 45 90)
+  cdat_add_test(test_vcs_boxfill_orthographic_${lat_0}
+    "${PYTHON_EXECUTABLE}"
+    ${cdat_SOURCE_DIR}/testing/vcs/test_vcs_boxfill_orthographic.py
+    "${BASELINE_DIR}/test_vcs_boxfill_orthographic_${lat_0}.png"
+    ${lat_0}
+    )
+endforeach()
+cdat_add_test(test_vcs_extends
   "${PYTHON_EXECUTABLE}"
-  ${cdat_SOURCE_DIR}/testing/vcs/test_vcs_boxfill_polar.py
-  "${BASELINE_DIR}/test_vcs_boxfill_polar.png"
-  )
+  ${cdat_SOURCE_DIR}/testing/vcs/test_vcs_extends.py
+)
 cdat_add_test(test_vcs_create_get
   "${PYTHON_EXECUTABLE}"
   ${cdat_SOURCE_DIR}/testing/vcs/test_vcs_create_get.py
@@ -381,6 +399,14 @@ cdat_add_test(test_vcs_missing_colorname
   ${cdat_SOURCE_DIR}/testing/vcs/test_vcs_missing_colorname.py
   "${BASELINE_DIR}/test_vcs_missing_colorname.png"
   )
+cdat_add_test(test_vcs_geometry
+  "${PYTHON_EXECUTABLE}"
+  ${cdat_SOURCE_DIR}/testing/vcs/test_vcs_geometry.py
+  )
+cdat_add_test(test_vcs_export_text
+  "${PYTHON_EXECUTABLE}"
+  ${cdat_SOURCE_DIR}/testing/vcs/test_vcs_export_text.py
+  )
 ##############################################################################
 #
 # These tests perform plotting and need sample data
@@ -423,6 +449,11 @@ if (CDAT_DOWNLOAD_SAMPLE_DATA)
     ${cdat_SOURCE_DIR}/testing/vcs/test_vcs_boxfill_custom.py
     "${BASELINE_DIR}/test_vcs_boxfill_custom.png"
     )
+  cdat_add_test(test_vcs_boxfill_datawc_time
+    "${PYTHON_EXECUTABLE}"
+    ${cdat_SOURCE_DIR}/testing/vcs/test_vcs_boxfill_datawc_time.py
+    "${BASELINE_DIR}/test_vcs_boxfill_datawc_time.png"
+    )
   cdat_add_test(test_vcs_boxfill_custom_non_default_levels
     "${PYTHON_EXECUTABLE}"
     ${cdat_SOURCE_DIR}/testing/vcs/test_vcs_boxfill_custom_non_default_levels.py
@@ -673,6 +704,11 @@ cdat_add_test(test_vcs_settings_color_name_rgba
    ENDFOREACH(ptype)
   ENDFOREACH(gm)
 
+  cdat_add_test(test_vcs_isoline_extend_attributes
+    "${PYTHON_EXECUTABLE}"
+    ${cdat_SOURCE_DIR}/testing/vcs/test_vcs_isoline_extend_attributes.py
+    ${BASELINE_DIR}/test_vcs_isoline_extend_attributes.png
+    )
   cdat_add_test(test_vcs_isoline_numpy
     "${PYTHON_EXECUTABLE}"
     ${cdat_SOURCE_DIR}/testing/vcs/test_vcs_isoline_numpy.py
@@ -843,6 +879,17 @@ cdat_add_test(test_vcs_settings_color_name_rgba
     --bigvalues
     "--source=${BASELINE_DIR}/test_vcs_basic_isofill_bigvalues.png"
     )
+
+   foreach(level 0 1 2)
+     cdat_add_test(test_vcs_isofill_level${level}
+       "${PYTHON_EXECUTABLE}"
+       ${cdat_SOURCE_DIR}/testing/vcs/test_vcs_isofill_levels.py
+       "${BASELINE_DIR}/test_vcs_isofill_level${level}.png"
+       "${UVCDAT_GIT_TESTDATA_DIR}/data/HadSST1870to99.nc"
+       ${level}
+       )
+   endforeach()
+
   cdat_add_test(test_vcs_issue_960_labels
     "${PYTHON_EXECUTABLE}"
     ${cdat_SOURCE_DIR}/testing/vcs/test_vcs_issue_960_labels.py
@@ -900,6 +947,16 @@ cdat_add_test(test_vcs_settings_color_name_rgba
       ${cdat_SOURCE_DIR}/testing/vcs/test_vcs_vectors_robinson_wrap.py
       "${BASELINE_DIR}/test_vcs_vectors_robinson_wrap.png"
       )
+    cdat_add_test(test_vcs_vectors_scale_options
+      "${PYTHON_EXECUTABLE}"
+      ${cdat_SOURCE_DIR}/testing/vcs/test_vcs_vectors_scale_options.py
+      "${BASELINE_DIR}/test_vcs_vectors_scale_options_off.png"
+      "${BASELINE_DIR}/test_vcs_vectors_scale_options_constant.png"
+      "${BASELINE_DIR}/test_vcs_vectors_scale_options_linear.png"
+      "${BASELINE_DIR}/test_vcs_vectors_scale_options_normalize.png"
+      "${BASELINE_DIR}/test_vcs_vectors_scale_options_constantNLinear.png"
+      "${BASELINE_DIR}/test_vcs_vectors_scale_options_constantNNormalize.png"
+      )
   endif()
 endif()
 
@@ -935,6 +992,11 @@ cdat_add_test(test_vcs_configurator_resize
   ${cdat_SOURCE_DIR}/testing/vcs/test_vcs_configurator_resize.py
   ${BASELINE_DIR}/test_vcs_configurator_resize.png
 )
+cdat_add_test(vcs_test_fewer_colors_than_levels
+  "${PYTHON_EXECUTABLE}"
+  ${cdat_SOURCE_DIR}/testing/vcs/test_fewer_colors_than_levels.py
+  ${BASELINE_DIR}/test_fewer_colors_than_levels.png
+)
 
 cdat_add_test(test_vcs_colorpicker_appearance
   "${PYTHON_EXECUTABLE}"
@@ -942,33 +1004,22 @@ cdat_add_test(test_vcs_colorpicker_appearance
   ${BASELINE_DIR}/test_vcs_colorpicker_appearance.png
 )
 
-cdat_add_test(test_vcs_click_info
-  "${PYTHON_EXECUTABLE}"
-  ${cdat_SOURCE_DIR}/testing/vcs/test_vcs_click_info.py
-  ${BASELINE_DIR}/test_vcs_click_info.png
-  a_boxfill
-)
-
-cdat_add_test(test_vcs_click_info_mollweide_boxfill
-  "${PYTHON_EXECUTABLE}"
-  ${cdat_SOURCE_DIR}/testing/vcs/test_vcs_click_info.py
-  ${BASELINE_DIR}/test_vcs_click_info_mollweide_boxfill.png
-  a_mollweide_boxfill
-)
-
-cdat_add_test(test_vcs_click_info_meshfill
-  "${PYTHON_EXECUTABLE}"
-  ${cdat_SOURCE_DIR}/testing/vcs/test_vcs_click_info.py
-  ${BASELINE_DIR}/test_vcs_click_info_meshfill.png
-  a_meshfill
-)
 
-cdat_add_test(test_vcs_click_info_robinson_meshfill
-  "${PYTHON_EXECUTABLE}"
-  ${cdat_SOURCE_DIR}/testing/vcs/test_vcs_click_info.py
-  ${BASELINE_DIR}/test_vcs_click_info_robinson_meshfill.png
-  a_robinson_meshfill
-)
+foreach(plot a_boxfill a_mollweide_boxfill a_meshfill a_robinson_meshfill
+        a_isofill a_isoline vector_default)
+  string(SUBSTRING ${plot} 0 2 plot_prefix)
+  if (${plot_prefix} STREQUAL "a_")
+    string(SUBSTRING ${plot} 2 -1 plot_name)
+  else ()
+    string(REGEX MATCH "[^_]+" plot_name ${plot})
+  endif ()
+  cdat_add_test(test_vcs_click_info_${plot_name}
+    "${PYTHON_EXECUTABLE}"
+    ${cdat_SOURCE_DIR}/testing/vcs/test_vcs_click_info.py
+    "${BASELINE_DIR}/test_vcs_click_info_${plot_name}.png"
+    ${plot}
+    )
+endforeach()
 
 
 cdat_add_test(test_vcs_mercator_edge
@@ -989,40 +1040,61 @@ cdat_add_test(test_vcs_large_pattern_hatch
   ${cdat_SOURCE_DIR}/testing/vcs/test_vcs_large_pattern_hatch.py
   ${BASELINE_DIR}/test_vcs_large_pattern_hatch.png
 )
-# crashes on mac commenting out for release`
-#foreach(x_over_y 0.5 2)
-#  # a_meshfill does not work yet, as meshfills are wrapped which is not known to VCS
-#  foreach(plot a_boxfill a_mollweide_boxfill a_robinson_meshfill a_lambert_isofill a_robinson_isoline)
-#    foreach(mode foreground background)
-#      string(SUBSTRING ${plot} 2 -1 plot_name)
-#      cdat_add_test(test_vcs_autot_axis_titles_${mode}_${plot_name}_${x_over_y}
-#        "${PYTHON_EXECUTABLE}"
-#        ${cdat_SOURCE_DIR}/testing/vcs/test_vcs_autot_axis_titles.py
-#        "${BASELINE_DIR}/test_vcs_autot_axis_titles_${plot_name}_${x_over_y}.png"
-#        ${mode}
-#        ${plot}
-#        ${x_over_y}
-#        )
-#    endforeach()
-#  endforeach()
-#endforeach()
+
+foreach(x_over_y 0.5 2)
+ # a_meshfill does not work yet, as meshfills are wrapped which is not known to VCS
+ foreach(plot a_boxfill a_mollweide_boxfill a_robinson_meshfill a_lambert_isofill a_robinson_isoline)
+   foreach(mode foreground background)
+     string(SUBSTRING ${plot} 2 -1 plot_name)
+     cdat_add_test(test_vcs_autot_axis_titles_${mode}_${plot_name}_${x_over_y}
+       "${PYTHON_EXECUTABLE}"
+       ${cdat_SOURCE_DIR}/testing/vcs/test_vcs_autot_axis_titles.py
+       "${BASELINE_DIR}/test_vcs_autot_axis_titles_${plot_name}_${x_over_y}.png"
+       ${mode}
+       ${plot}
+       ${x_over_y}
+       )
+   endforeach()
+ endforeach()
+endforeach()
+
 cdat_add_test(test_vcs_boxfill_lambert_crash
   "${PYTHON_EXECUTABLE}"
   ${cdat_SOURCE_DIR}/testing/vcs/test_vcs_boxfill_lambert_crash.py
   "${BASELINE_DIR}/test_vcs_boxfill_lambert_crash.png"
   "${UVCDAT_GIT_TESTDATA_DIR}/data/NCEP_09_climo.nc"
 )
+
+cdat_add_test(test_vcs_line_patterns
+  "${PYTHON_EXECUTABLE}"
+  ${cdat_SOURCE_DIR}/testing/vcs/test_vcs_line_patterns.py
+  "${BASELINE_DIR}/test_vcs_line_patterns.png"
+)
+
 cdat_add_test(test_vcs_init_open_sizing
   "${PYTHON_EXECUTABLE}"
   ${cdat_SOURCE_DIR}/testing/vcs/test_vcs_init_open_sizing.py
 )
-# Rename baseline
+## Rename baseline
 cdat_add_test(test_vcs_matplotlib_colormap
   "${PYTHON_EXECUTABLE}"
   ${cdat_SOURCE_DIR}/testing/vcs/test_vcs_matplotlib_colormap.py
   ${BASELINE_DIR}/test_vcs_matplotlib_colormap.png
 )
 
+cdat_add_test(test_vcs_no_continents
+  "${PYTHON_EXECUTABLE}"
+  ${cdat_SOURCE_DIR}/testing/vcs/test_vcs_no_continents.py
+  ${BASELINE_DIR}/test_vcs_no_continents.png
+)
+
+cdat_add_test(test_vcs_textextents
+  "${PYTHON_EXECUTABLE}"
+  ${cdat_SOURCE_DIR}/testing/vcs/test_vcs_textextents.py
+  ${BASELINE_DIR}/test_textextents.png
+)
+
+
 
 add_subdirectory(vtk_ui)
 add_subdirectory(editors)
diff --git a/testing/vcs/test_fewer_colors_than_levels.py b/testing/vcs/test_fewer_colors_than_levels.py
new file mode 100644
index 0000000000000000000000000000000000000000..c500cf19e3b9aafcde84c2ad33c91ad3fd780391
--- /dev/null
+++ b/testing/vcs/test_fewer_colors_than_levels.py
@@ -0,0 +1,15 @@
+import os, sys, cdms2, vcs, testing.regression as regression
+
+dataset = cdms2.open(os.path.join(vcs.sample_data,"clt.nc"))
+data = dataset("clt")
+
+canvas = regression.init()
+
+boxfill = canvas.createboxfill()
+
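+# Use fewer colors (242-250) than boxfill levels to exercise the level-to-color mapping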
+boxfill.color_1 = 242
+boxfill.color_2 = 250
+
+canvas.plot(data, boxfill, bg=1)
+
+regression.run(canvas, "test_fewer_colors_than_levels.png")
\ No newline at end of file
diff --git a/testing/vcs/test_vcs_1D_datawc.py b/testing/vcs/test_vcs_1D_datawc.py
index 1595a6cd712a277f3cb4e233969cfad24975a324..8e671bb9338f5015f6417b4f6972dd9b98ea4b5b 100644
--- a/testing/vcs/test_vcs_1D_datawc.py
+++ b/testing/vcs/test_vcs_1D_datawc.py
@@ -4,15 +4,13 @@ import vcs,numpy,cdms2,MV2,os,sys
 src=sys.argv[1]
 pth = os.path.join(os.path.dirname(__file__),"..")
 sys.path.append(pth)
-import checkimage
-x=vcs.init()
-x.setantialiasing(0)
-x.drawlogooff()
 
-x.setbgoutputdimensions(1200,1091,units="pixels")
+import testing.regression as regression
+
+x = regression.init()
 yx =x.createyxvsx()
 
-data = """-11.14902417  -9.17390922  -7.29515002  
+data = """-11.14902417  -9.17390922  -7.29515002
 -7.51774549  -8.63608171
   -10.4827395   -9.93859485  -7.3394366   -5.39241468  -5.74825567
      -6.74967902  -7.09622319  -5.93836983  -4.04592997  -2.65591499
@@ -30,14 +28,9 @@ data = numpy.array(data,dtype=numpy.float)
 data = MV2.array(data)
 yx.datawc_x1 = 0
 yx.datawc_x2 = 80
-yx.datawc_y1 =-12 
-yx.datawc_y2 = 12 
+yx.datawc_y1 =-12
+yx.datawc_y2 = 12
 
 
 x.plot(data,yx,bg=1)
-fnm = "test_vcs_1D_datawc.png"
-x.png(fnm)
-print "fnm:",fnm
-print "src:",src
-ret = checkimage.check_result_image(fnm,src,checkimage.defaultThreshold)
-sys.exit(ret)
+regression.run(x, "test_vcs_1D_datawc.png", src)
\ No newline at end of file
diff --git a/testing/vcs/test_vcs_1D_datawc_missing.py b/testing/vcs/test_vcs_1D_datawc_missing.py
index 07e6f560aa52cb33414af3a3156092634117c18c..d6caabc8e9c0320a9bca0519ce5771c2dfd67a30 100644
--- a/testing/vcs/test_vcs_1D_datawc_missing.py
+++ b/testing/vcs/test_vcs_1D_datawc_missing.py
@@ -1,19 +1,11 @@
 
-import vcs,numpy,cdms2,MV2,os,sys
-src=sys.argv[1]
-pth = os.path.join(os.path.dirname(__file__),"..")
-sys.path.append(pth)
-import checkimage
+import vcs, numpy, cdms2, MV2, os, sys, testing.regression as regression
 
-x=vcs.init()
-x.setantialiasing(0)
-x.drawlogooff()
-x.setbgoutputdimensions(1200,1091,units="pixels")
-
-yx =x.createyxvsx()
+x = regression.init()
+yx = x.createyxvsx()
 
 data = """
--999. -999. -999. -999. -999. -999. -999. -999. -999. -999. -999. -999. -999. -999. -999. -999. -999. 
+-999. -999. -999. -999. -999. -999. -999. -999. -999. -999. -999. -999. -999. -999. -999. -999. -999.
 0.059503571833625334
 0.059503571833625334 0.05664014775641405 0.05193557222118004
 0.04777129850801233 0.0407139313814465 0.029382624830271705
@@ -42,16 +34,5 @@ data = """
 """.split()
 data = numpy.array(data,dtype=numpy.float)
 data = MV2.masked_less(data,-900)
-#yx.datawc_x1 = 0
-#yx.datawc_x2 = 80
-##yx.datawc_y1 =-12 
-#yx.datawc_y2 = 12 
-
-
 x.plot(data,yx,bg=1)
-fnm = "test_vcs_1D_datawc_missing.png"
-x.png(fnm)
-print "fnm:",fnm
-print "src:",src
-ret = checkimage.check_result_image(fnm,src,checkimage.defaultThreshold)
-sys.exit(ret)
+regression.run(x, "test_vcs_1D_datawc_missing.png")
diff --git a/testing/vcs/test_vcs_1D_with_manyDs.py b/testing/vcs/test_vcs_1D_with_manyDs.py
index 1caba446e1b601e781c9f4bf495c8c337f9741bc..28a6a7a8bc7f6427da02fdff5ef732af4affdfff 100644
--- a/testing/vcs/test_vcs_1D_with_manyDs.py
+++ b/testing/vcs/test_vcs_1D_with_manyDs.py
@@ -1,31 +1,9 @@
 
-import vcs,numpy,os,sys
-src=sys.argv[1]
-pth = os.path.join(os.path.dirname(__file__),"..")
-sys.path.append(pth)
-import checkimage
-
-x=vcs.init()
-x.setantialiasing(0)
-x.drawlogooff()
-x.setbgoutputdimensions(1200,1091,units="pixels")
+import vcs, numpy, cdms2, MV2, os, sys, testing.regression as regression
 
+x = regression.init()
 d = numpy.sin(numpy.arange(100))
-d=numpy.reshape(d,(10,10))
-
-
+d = numpy.reshape(d,(10,10))
 one = x.create1d()
-
 x.plot(d,one,bg=1)
-
-
-fnm = "test_1D_with_manyDs.png"
-x.png(fnm)
-
-print "fnm:",fnm
-print "src:",src
-ret = checkimage.check_result_image(fnm,src,checkimage.defaultThreshold)
-sys.exit(ret)
-
-
-
+regression.run(x, "test_1D_with_manyDs.png", sys.argv[1])
\ No newline at end of file
diff --git a/testing/vcs/test_vcs_1d_in_boxfill.py b/testing/vcs/test_vcs_1d_in_boxfill.py
index 292fe98a3cc7ab7dbbc24986fc7c3a771eaed4e3..1da5743947f7ac7602a2bfdfd726016fbd52b5f7 100644
--- a/testing/vcs/test_vcs_1d_in_boxfill.py
+++ b/testing/vcs/test_vcs_1d_in_boxfill.py
@@ -1,29 +1,8 @@
 
-import vcs,numpy,os,sys
-src=sys.argv[1]
-pth = os.path.join(os.path.dirname(__file__),"..")
-sys.path.append(pth)
-import checkimage
-
-x=vcs.init()
-x.setantialiasing(0)
-x.drawlogooff()
-x.setbgoutputdimensions(1200,1091,units="pixels")
+import vcs, numpy, cdms2, MV2, os, sys, testing.regression as regression
 
+x = regression.init()
 d = numpy.sin(numpy.arange(100))
-
 b = x.createboxfill()
-
 x.plot(d,b,bg=1)
-
-
-fnm = "test_1d_in_boxfill.png"
-x.png(fnm)
-
-print "fnm:",fnm
-print "src:",src
-ret = checkimage.check_result_image(fnm,src,checkimage.defaultThreshold)
-sys.exit(ret)
-
-
-
+regression.run(x, "test_1d_in_boxfill.png", sys.argv[1])
\ No newline at end of file
diff --git a/testing/vcs/test_vcs_1d_marker_not_shown_if_xaxis_flipped.py b/testing/vcs/test_vcs_1d_marker_not_shown_if_xaxis_flipped.py
index c350e4c5992b9f7e07450a8e128daac896707796..f850f977e2626779fa515806c5052ad279f48bf8 100644
--- a/testing/vcs/test_vcs_1d_marker_not_shown_if_xaxis_flipped.py
+++ b/testing/vcs/test_vcs_1d_marker_not_shown_if_xaxis_flipped.py
@@ -1,42 +1,15 @@
-import vcs
-import numpy
-import MV2
-import cdms2
-import sys
-import os
 
-src=sys.argv[1]
-pth = os.path.join(os.path.dirname(__file__),"..")
-sys.path.append(pth)
-import checkimage
-
-x=vcs.init()
-x.setantialiasing(0)
-x.drawlogooff()
-x.setbgoutputdimensions(1200,1091,units="pixels")
+import vcs, numpy, cdms2, MV2, os, sys, testing.regression as regression
 
+x = regression.init()
 data = MV2.array([4,5,6,7,1,3,7,9,])+230.
-
 p = cdms2.createAxis([2,5,100,200,500,800,850,1000])
-
 data.setAxis(0,p)
-
 data.id="jim"
-
 gm=x.create1d()
-
 gm.linewidth=0
 gm.datawc_x1=1000
 gm.datawc_x2=0
-
 gm.markersize=30
-
 x.plot(data,gm,bg=1)
-
-fnm = "test_1d_marker_not_shown_if_xaxis_flipped.png"
-x.png(fnm)
-
-print "fnm:",fnm
-print "src:",src
-ret = checkimage.check_result_image(fnm,src,checkimage.defaultThreshold)
-sys.exit(ret)
+regression.run(x, "test_1d_marker_not_shown_if_xaxis_flipped.png", sys.argv[1])
\ No newline at end of file
diff --git a/testing/vcs/test_vcs_1d_missing.py b/testing/vcs/test_vcs_1d_missing.py
index 3a6880bdd5e556f628a02dbe3da73e1180a101a0..8c124e09c617d26b7102fb05fe6ed498c971c749 100644
--- a/testing/vcs/test_vcs_1d_missing.py
+++ b/testing/vcs/test_vcs_1d_missing.py
@@ -3,15 +3,11 @@ import vcs,numpy,cdms2,MV2,os,sys
 src=sys.argv[1]
 pth = os.path.join(os.path.dirname(__file__),"..")
 sys.path.append(pth)
-import checkimage
+import testing.regression as regression
 
-x=vcs.init()
-x.setantialiasing(0)
-x.drawlogooff()
-x.setbgoutputdimensions(1200,1091,units="pixels")
+x = regression.init()
 yx =x.createyxvsx()
-
-data = """-11.14902417  -9.17390922  -7.29515002  
+data = """-11.14902417  -9.17390922  -7.29515002
 -7.51774549  -8.63608171
   -10.4827395   -9.93859485  -7.3394366   -5.39241468  -5.74825567
      -6.74967902  -7.09622319  -5.93836983  -4.04592997  -2.65591499
@@ -30,17 +26,5 @@ data = MV2.array(data)
 
 data=MV2.masked_where(MV2.logical_and(data>-4,data<-2),data)
 
-#yx.datawc_x1 = 0
-#yx.datawc_x2 = 80
-##yx.datawc_y1 =-12 
-#yx.datawc_y2 = 12 
-
-
-x.plot(data,yx,bg=1)
-fnm = "test_vcs_1d_missing.png"
-x.png(fnm)
-
-print "fnm:",fnm
-print "src:",src
-ret = checkimage.check_result_image(fnm,src,checkimage.defaultThreshold)
-sys.exit(ret)
+x.plot(data, yx, bg=1)
+regression.run(x, "test_vcs_1d_missing.png", src)
diff --git a/testing/vcs/test_vcs_animate_boxfill.py b/testing/vcs/test_vcs_animate_boxfill.py
index 2bbd53c267f71f2259b7a56f10024cc85e5c889c..641b59edb259be7e5183213a31910673e923e39c 100644
--- a/testing/vcs/test_vcs_animate_boxfill.py
+++ b/testing/vcs/test_vcs_animate_boxfill.py
@@ -1,20 +1,14 @@
-import vcs
-import cdms2
-import os
-import sys
-import time
+import vcs, numpy, cdms2, MV2, os, sys, time, testing.regression as regression
+
 pth = os.path.join(os.path.dirname(__file__),"..")
 sys.path.append(pth)
-import checkimage
-
 f=cdms2.open(os.path.join(vcs.sample_data,"clt.nc"))
 s=f("clt",slice(0,12)) # read only 12 times steps to speed up things
 
-x=vcs.init()
+x = regression.init()
 x.setantialiasing(0)
 x.drawlogooff()
 x.setbgoutputdimensions(1200,1091,units="pixels")
-
 gm=x.createboxfill()
 x.plot(s,gm,bg=1)
 x.animate.create()
@@ -26,8 +20,7 @@ src_pth = sys.argv[1]
 pth = os.path.join(src_pth,prefix)
 ret = 0
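+# Compare each saved animation frame against its baseline counterpart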
 for p in pngs:
-  print "Checking:",p
-  ret += checkimage.check_result_image(p,os.path.join(pth,os.path.split(p)[1]),checkimage.defaultThreshold)
+  ret += regression.check_result_image(p,os.path.join(pth,os.path.split(p)[1]))
 if ret == 0:
     os.removedirs(os.path.split(p)[0])
     os.remove("%s.mp4" % prefix)
diff --git a/testing/vcs/test_vcs_animate_isofill.py b/testing/vcs/test_vcs_animate_isofill.py
index ba5ea0e8d6c7faba449fe0c5ff19c35b419aba79..591d6636213fbed88d9058f1e5bacd981634e257 100644
--- a/testing/vcs/test_vcs_animate_isofill.py
+++ b/testing/vcs/test_vcs_animate_isofill.py
@@ -1,20 +1,13 @@
-import vcs
-import cdms2
-import os
-import sys
-import time
+
+import vcs, numpy, cdms2, MV2, os, sys, time, testing.regression as regression
+
 pth = os.path.join(os.path.dirname(__file__),"..")
 sys.path.append(pth)
-import checkimage
-
-f=cdms2.open(os.path.join(vcs.sample_data,"clt.nc"))
-s=f("clt",slice(0,12)) # read only 12 times steps to speed up things
 
-x=vcs.init()
-x.setantialiasing(0)
-x.drawlogooff()
-x.setbgoutputdimensions(1200,1091,units="pixels")
+f = cdms2.open(os.path.join(vcs.sample_data,"clt.nc"))
+s = f("clt",slice(0,12)) # read only 12 times steps to speed up things
 
+x = regression.init()
 gm=x.createisofill()
 x.plot(s,gm,bg=1)
 x.animate.create()
@@ -26,8 +19,8 @@ src_pth = sys.argv[1]
 pth = os.path.join(src_pth,prefix)
 ret = 0
 for p in pngs:
-  print "Checking:",p
-  ret += checkimage.check_result_image(p,os.path.join(pth,os.path.split(p)[1]),checkimage.defaultThreshold)
+  ret += regression.check_result_image(p,os.path.join(pth,os.path.split(p)[1]),
+         regression.defaultThreshold)
 if ret == 0:
     os.removedirs(os.path.split(p)[0])
     os.remove("%s.mp4" % prefix)
diff --git a/testing/vcs/test_vcs_animate_isoline.py b/testing/vcs/test_vcs_animate_isoline.py
index 41e0c7318b7b84f7b094a8dd2d8d0b7339456734..cea333d3bb425d480f6395a2e45329c32295514a 100644
--- a/testing/vcs/test_vcs_animate_isoline.py
+++ b/testing/vcs/test_vcs_animate_isoline.py
@@ -1,33 +1,24 @@
-import vcs
-import cdms2
-import os
-import sys
-import time
+import vcs, numpy, cdms2, MV2, os, sys, time, testing.regression as regression
+
 pth = os.path.join(os.path.dirname(__file__),"..")
 sys.path.append(pth)
-import checkimage
-
-f=cdms2.open(os.path.join(vcs.sample_data,"clt.nc"))
-s=f("clt",slice(0,12)) # read only 12 times steps to speed up things
 
-x=vcs.init()
-x.setantialiasing(0)
-x.drawlogooff()
-x.setbgoutputdimensions(1200,1091,units="pixels")
+f = cdms2.open(os.path.join(vcs.sample_data,"clt.nc"))
+s = f("clt",slice(0,12)) # read only 12 times steps to speed up things
 
-iso=x.createisoline()
-x.plot(s,iso,bg=1)
+x = regression.init()
+iso = x.createisoline()
+x.plot(s,iso, bg=1)
 x.animate.create()
 print "Saving now"
 prefix= os.path.split(__file__)[1][:-3]
 x.animate.save("%s.mp4"%prefix)
-pngs = x.animate.close(preserve_pngs = True) # so we can look at them again
+pngs = x.animate.close(preserve_pngs=True) # so we can look at them again
 src_pth = sys.argv[1]
 pth = os.path.join(src_pth,prefix)
 ret = 0
 for p in pngs:
-  print "Checking:",p
-  ret += checkimage.check_result_image(p,os.path.join(pth,os.path.split(p)[1]),checkimage.defaultThreshold)
+  ret += regression.check_result_image(p,os.path.join(pth,os.path.split(p)[1]))
 if ret == 0:
     os.removedirs(os.path.split(p)[0])
     os.remove("%s.mp4" % prefix)
diff --git a/testing/vcs/test_vcs_animate_isoline_colored.py b/testing/vcs/test_vcs_animate_isoline_colored.py
index 4fed45c864e7b84daccaedb5a95568d597b89daf..4519ac16c021af59bc89f6dbdf016409df02aa41 100644
--- a/testing/vcs/test_vcs_animate_isoline_colored.py
+++ b/testing/vcs/test_vcs_animate_isoline_colored.py
@@ -1,19 +1,12 @@
-import vcs
-import cdms2
-import os
-import sys
-import time
+import vcs, numpy, cdms2, MV2, os, sys, time, testing.regression as regression
+
 pth = os.path.join(os.path.dirname(__file__),"..")
 sys.path.append(pth)
-import checkimage
 
-f=cdms2.open(os.path.join(vcs.sample_data,"clt.nc"))
-s=f("clt",slice(0,12)) # read only 12 times steps to speed up things
+f = cdms2.open(os.path.join(vcs.sample_data,"clt.nc"))
+s = f("clt",slice(0,12)) # read only 12 times steps to speed up things
 
-x=vcs.init()
-x.setantialiasing(0)
-x.drawlogooff()
-x.setbgoutputdimensions(1200,1091,units="pixels")
+x = regression.init()
 
 iso=x.createisoline()
 levs = range(0,101,10)
@@ -34,8 +27,7 @@ src_pth = sys.argv[1]
 pth = os.path.join(src_pth,prefix)
 ret = 0
 for p in pngs:
-  print "Checking:",p
-  ret += checkimage.check_result_image(p,os.path.join(pth,os.path.split(p)[1]),checkimage.defaultThreshold)
+  ret += regression.check_result_image(p,os.path.join(pth,os.path.split(p)[1]))
 if ret == 0:
     os.removedirs(os.path.split(p)[0])
     os.remove("%s.mp4" % prefix)
diff --git a/testing/vcs/test_vcs_animate_isoline_text_labels.py b/testing/vcs/test_vcs_animate_isoline_text_labels.py
index 7a3be1fc5e902cc8159884ec55732a1c0a8c2b6a..0e83c02c6a4e54f45a53f945603c977a4deabaad 100644
--- a/testing/vcs/test_vcs_animate_isoline_text_labels.py
+++ b/testing/vcs/test_vcs_animate_isoline_text_labels.py
@@ -1,11 +1,7 @@
-import vcs
-import cdms2
-import os
-import sys
-import time
+import vcs, numpy, cdms2, MV2, os, sys, time, testing.regression as regression
+
 pth = os.path.join(os.path.dirname(__file__),"..")
 sys.path.append(pth)
-import checkimage
 
 f=cdms2.open(os.path.join(vcs.sample_data,"clt.nc"))
 s=f("clt",slice(0,12)) # read only 12 times steps to speed up things
@@ -27,8 +23,7 @@ src_pth = sys.argv[1]
 pth = os.path.join(src_pth,prefix)
 ret = 0
 for p in pngs:
-  print "Checking:",p
-  ret += checkimage.check_result_image(p,os.path.join(pth,os.path.split(p)[1]),checkimage.defaultThreshold)
+  ret += regression.check_result_image(p,os.path.join(pth,os.path.split(p)[1]))
 if ret == 0:
     os.removedirs(os.path.split(p)[0])
     os.remove("%s.mp4" % prefix)
diff --git a/testing/vcs/test_vcs_animate_isoline_text_labels_colored.py b/testing/vcs/test_vcs_animate_isoline_text_labels_colored.py
index cf009e4a8714e228eacf635c6c782226b015ab6a..02711e2196bf069a509d7f5da55f7c660421b3a2 100644
--- a/testing/vcs/test_vcs_animate_isoline_text_labels_colored.py
+++ b/testing/vcs/test_vcs_animate_isoline_text_labels_colored.py
@@ -1,11 +1,7 @@
-import vcs
-import cdms2
-import os
-import sys
-import time
+import vcs, numpy, cdms2, MV2, os, sys, time, testing.regression as regression
+
 pth = os.path.join(os.path.dirname(__file__),"..")
 sys.path.append(pth)
-import checkimage
 
 f=cdms2.open(os.path.join(vcs.sample_data,"clt.nc"))
 s=f("clt",slice(0,12)) # read only 12 times steps to speed up things
@@ -36,7 +32,7 @@ pth = os.path.join(src_pth,prefix)
 ret = 0
 for p in pngs:
   print "Checking:",p
-  ret += checkimage.check_result_image(p,os.path.join(pth,os.path.split(p)[1]),checkimage.defaultThreshold)
+  ret += regression.check_result_image(p,os.path.join(pth,os.path.split(p)[1]))
 if ret == 0:
     os.removedirs(os.path.split(p)[0])
     os.remove("%s.mp4" % prefix)
diff --git a/testing/vcs/test_vcs_animate_meshfill.py b/testing/vcs/test_vcs_animate_meshfill.py
index 78d6b1551635e0a75ab33e555ccb268108a8bff4..aa89a8639d123ac00461cb0500bbdfcebb2a5049 100644
--- a/testing/vcs/test_vcs_animate_meshfill.py
+++ b/testing/vcs/test_vcs_animate_meshfill.py
@@ -1,12 +1,7 @@
-import vcs
-import cdms2
-import os
-import sys
-import MV2
+import vcs, numpy, cdms2, MV2, os, sys, testing.regression as regression
 
 pth = os.path.join(os.path.dirname(__file__),"..")
 sys.path.append(pth)
-import checkimage
 
 f=cdms2.open(os.path.join(vcs.sample_data,"sampleCurveGrid4.nc"))
 s=f("sample")
@@ -38,8 +33,7 @@ src_pth = sys.argv[1]
 pth = os.path.join(src_pth,prefix)
 ret = 0
 for p in pngs:
-  print "Checking:",p
-  ret += checkimage.check_result_image(p,os.path.join(pth,os.path.split(p)[1]),checkimage.defaultThreshold)
+  ret += regression.check_result_image(p,os.path.join(pth,os.path.split(p)[1]))
 if ret == 0:
     os.removedirs(os.path.split(p)[0])
     os.remove("%s.mp4" % prefix)
diff --git a/testing/vcs/test_vcs_antialiasing.py b/testing/vcs/test_vcs_antialiasing.py
index 34f43e040c458c8b8fa7c0707e28c7e167be07d0..f744d3e04f1e0c67a3f40aa27e7f738f01355a07 100644
--- a/testing/vcs/test_vcs_antialiasing.py
+++ b/testing/vcs/test_vcs_antialiasing.py
@@ -1,4 +1,3 @@
-
 import vcs
 x=vcs.init()
 x.drawlogooff()
diff --git a/testing/vcs/test_vcs_aspect_ratio.py b/testing/vcs/test_vcs_aspect_ratio.py
index 1e59304a378381c0620426e3d9fce5a0642ef0b4..9e3cb376760e2402787fc3bbff6040e3c55ed128 100644
--- a/testing/vcs/test_vcs_aspect_ratio.py
+++ b/testing/vcs/test_vcs_aspect_ratio.py
@@ -1,27 +1,20 @@
-
-import vcs
-import cdms2
-import sys
-import os
-import MV2
-import vcs,numpy,cdms2,MV2,os,sys
-src=sys.argv[1]
+import vcs, numpy, cdms2, MV2, os, sys, testing.regression as regression
+src = sys.argv[1]
 pth0 = os.path.dirname(__file__)
 pth = os.path.join(pth0,"..")
 sys.path.append(pth)
-import checkimage
 
-f=cdms2.open(os.path.join(vcs.sample_data,"clt.nc"))
-s=f("clt",time=slice(0,1),squeeze=1)
-gm=vcs.createisofill()
+f = cdms2.open(os.path.join(vcs.sample_data,"clt.nc"))
+s = f("clt",time=slice(0,1),squeeze=1)
+gm = vcs.createisofill()
 
 def plot_a_ratio(s,gm,ratio):
     ret = 0
-    x=vcs.init()
-x.drawlogooff()
+    x = regression.init()
+    x.drawlogooff()
     x.open()
     x.geometry(400,800)
-    y=vcs.init()
+    y = regression.init()
     y.open()
     y.geometry(800,400)
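+    # Exercise the same ratio on a portrait (400x800) and a landscape (800x400) canvas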
     for X in [x,y]:
@@ -32,19 +25,13 @@ x.drawlogooff()
             orient = "port"
         fnm = "aspect_ratio_%s_%s.png" % (orient,ratio)
         X.png(fnm)
-        print "fnm:",fnm
         src = os.path.join(pth0,fnm)
-        print "src:",src
-        ret += checkimage.check_result_image(fnm,src,checkimage.defaultThreshold)
+        ret += regression.check_result_image(fnm, src)
     return ret
 
-ret = 0 
+ret = 0
 for ratio in ["1t","2t",".5t","autot"]:
     ret  += plot_a_ratio(s,gm,ratio)
 
 
-sys.exit(ret)
-
-
-
-
+sys.exit(ret)
\ No newline at end of file
diff --git a/testing/vcs/test_vcs_auto_time_labels.py b/testing/vcs/test_vcs_auto_time_labels.py
index 2dfa661a9bc8105e8b9ec4f83a0a1f46105f192d..e5422b29cc3db5edfcc5ef3cbe7e4a051ce86dc4 100644
--- a/testing/vcs/test_vcs_auto_time_labels.py
+++ b/testing/vcs/test_vcs_auto_time_labels.py
@@ -1,18 +1,7 @@
-import vcs,cdms2,os,sys
-src=sys.argv[1]
-pth = os.path.join(os.path.dirname(__file__),"..")
-sys.path.append(pth)
-import checkimage
-f=cdms2.open(os.path.join(vcs.sample_data,"clt.nc"))
-s=f("clt",longitude=slice(34,35),squeeze=1)
-x=vcs.init()
-x.setantialiasing(0)
-x.drawlogooff()
-x.setbgoutputdimensions(1200,1091,units="pixels")
+import vcs, cdms2, os, sys, testing.regression as regression
+
+f = cdms2.open(os.path.join(vcs.sample_data,"clt.nc"))
+s = f("clt",longitude=slice(34,35),squeeze=1)
+x = regression.init()
 x.plot(s,bg=1)
-fnm = "test_vcs_auto_time_labels.png"
-x.png(fnm)
-print "fnm:",fnm
-print "src:",src
-ret = checkimage.check_result_image(fnm,src,checkimage.defaultThreshold)
-sys.exit(ret)
+regression.run(x, "test_vcs_auto_time_labels.png", sys.argv[1])
\ No newline at end of file
diff --git a/testing/vcs/test_vcs_autot_axis_titles.py b/testing/vcs/test_vcs_autot_axis_titles.py
index ec485ccc0ac32598733cb75dfb627862bffda314..2728cbd6d5ad8f1cd1d5e360114a8244aa820c22 100644
--- a/testing/vcs/test_vcs_autot_axis_titles.py
+++ b/testing/vcs/test_vcs_autot_axis_titles.py
@@ -1,7 +1,4 @@
-import vcs
-import cdms2
-import os
-import sys
+import vcs, cdms2, os, sys, testing.regression as regression
 
 testConfig = {'a_boxfill': ('clt.nc', 'clt'),
               'a_mollweide_boxfill': ('clt.nc', 'clt'),
@@ -11,27 +8,23 @@ testConfig = {'a_boxfill': ('clt.nc', 'clt'),
               'a_robinson_isoline': ('clt.nc', 'clt')}
 
 # Tests if ratio=autot works correctly for background and foreground plots
-src = sys.argv[1]
 bg = 1
 if (sys.argv[2] == 'foreground'):
     bg = 0
 plot = sys.argv[3]
 x_over_y = sys.argv[4]
 if (x_over_y == '0.5'):
-    xSize = 400
-    ySize = 800
+    xSize = 250
+    ySize = 500
 else:
     xSize = 800
     ySize = 400
 pth = os.path.join(os.path.dirname(__file__), "..")
 sys.path.append(pth)
 
-import checkimage
-
 f = cdms2.open(vcs.sample_data + "/" + testConfig[plot][0])
 s = f(testConfig[plot][1])
-
-x = vcs.init(bg=bg, geometry=(xSize, ySize))
+x = regression.init(bg=bg, geometry=(xSize, ySize))
 
 # graphics method
 if (plot.find('boxfill') != -1):
@@ -49,11 +42,5 @@ else:
 x.setantialiasing(0)
 x.drawlogooff()
 x.plot(s, gm, ratio="autot")
-name = "test_autot_axis_titles_" + plot[2:] + "_" + x_over_y + "_" + str(bg) + ".png"
-x.png(name)
-
-print "name:", name
-print "src:", src
-
-ret = checkimage.check_result_image(name, src, checkimage.defaultThreshold)
-sys.exit(ret)
+name = "test_vcs_autot_axis_titles_" + plot[2:] + "_" + x_over_y + "_" + str(bg) + ".png"
+regression.run(x, name, sys.argv[1])
\ No newline at end of file
diff --git a/testing/vcs/test_vcs_bad_time_units.py b/testing/vcs/test_vcs_bad_time_units.py
index b89c2f7a77eaea79208af0c20e221cfce8aa9cfa..0bdcd16c2cff8ae6a6260b461b930819a902ccca 100644
--- a/testing/vcs/test_vcs_bad_time_units.py
+++ b/testing/vcs/test_vcs_bad_time_units.py
@@ -1,8 +1,8 @@
-import cdms2,vcs
-import os,sys
+import cdms2, vcs
+import os, sys
 
-f=cdms2.open(os.path.join(vcs.sample_data,"clt.nc"))
-s=f("clt",slice(0,1))
+f = cdms2.open(os.path.join(vcs.sample_data,"clt.nc"))
+s = f("clt",slice(0,1))
 s.getTime().units="XXX-))rvv"
-x=vcs.init()
-x.plot(s,bg=1)
+x = vcs.init()
+x.plot(s, bg=1)
diff --git a/testing/vcs/test_vcs_basic_gms.py b/testing/vcs/test_vcs_basic_gms.py
index daa144d9dd3dbbc734b704be1bcbf664f14da406..2cffb86e645cc221da0c5dbc649cf6e223b85c16 100644
--- a/testing/vcs/test_vcs_basic_gms.py
+++ b/testing/vcs/test_vcs_basic_gms.py
@@ -1,6 +1,4 @@
-
-import sys,os
-import argparse
+import argparse, os, sys, cdms2, MV2, testing.regression as regression, vcs, vtk
 
 p = argparse.ArgumentParser(description="Basic gm testing code for vcs")
 p.add_argument("--source", dest="src", help="source image file")
@@ -25,20 +23,9 @@ args = p.parse_args(sys.argv[1:])
 
 gm_type= args.gm
 src = args.src
-pth = os.path.join(os.path.dirname(__file__),"..")
-sys.path.append(pth)
-import checkimage
-
-import vcs
-import sys
-import cdms2
-import vtk
-import os
-import MV2
-
 bg = not args.show
 
-x=vcs.init()
+x = vcs.init()
 x.setantialiasing(0)
 x.drawlogooff()
 if bg:
@@ -143,7 +130,7 @@ print "fnm:",fnm
 print "src:",src
 if args.show:
     raw_input("Press Enter")
-ret = checkimage.check_result_image(fnm+'.png',src,20., cleanup=not args.keep)
+ret = regression.check_result_image(fnm+'.png',src,20., cleanup=not args.keep)
 if args.show:
     raw_input("Press Enter")
 sys.exit(ret)
diff --git a/testing/vcs/test_vcs_basic_text.py b/testing/vcs/test_vcs_basic_text.py
index d46bd4f48c15be83c90d872ccd0d1673c067105e..d69f37ce009d3c4488583cba7b151196977e8c2b 100644
--- a/testing/vcs/test_vcs_basic_text.py
+++ b/testing/vcs/test_vcs_basic_text.py
@@ -1,11 +1,6 @@
 
-import vcs,numpy,cdms2,MV2,os,sys
-src=sys.argv[1]
-pth = os.path.join(os.path.dirname(__file__),"..")
-sys.path.append(pth)
-import checkimage
-
-x=vcs.init()
+import vcs, numpy, cdms2, MV2, os, sys, testing.regression as regression
+x = regression.init()
 x.drawlogooff()
 x.setbgoutputdimensions(1200,1091,units="pixels")
 txt=x.createtext()
@@ -16,10 +11,4 @@ txt.halign = "center"
 txt.valign="base"
 txt.angle=45
 x.plot(txt,bg=1)
-fnm = "test_basic_text.png"
-x.png(fnm)
-
-print "fnm:",fnm
-print "src:",src
-ret = checkimage.check_result_image(fnm,src,checkimage.defaultThreshold)
-sys.exit(ret)
+regression.run(x, "test_basic_text.png", sys.argv[1])
diff --git a/testing/vcs/test_vcs_basic_vectors.py b/testing/vcs/test_vcs_basic_vectors.py
index 561f7f2d83d071fdaf456c0544fe9db61cfd1d10..37626e670fbe8cf81e09f0d8201f0ce29792a738 100644
--- a/testing/vcs/test_vcs_basic_vectors.py
+++ b/testing/vcs/test_vcs_basic_vectors.py
@@ -1,6 +1,5 @@
+import argparse, os, sys, numpy, cdms2, MV2, vcs, vtk
 
-import sys,os
-import argparse
 
 p = argparse.ArgumentParser(description="Basic gm testing code for vcs")
 p.add_argument("--source", dest="src", help="source image file")
@@ -18,21 +17,11 @@ args = p.parse_args(sys.argv[1:])
 
 if not args.show:
   src = args.src
-  pth = os.path.join(os.path.dirname(__file__),"..")
-  sys.path.append(pth)
-  import checkimage
-
-import vcs
-import sys
-import cdms2
-import vtk
-import os
-import MV2
-import numpy
+  import testing.regression as regression
 
 bg = not args.show
 
-x=vcs.init()
+x = vcs.init()
 x.setantialiasing(0)
 x.drawlogooff()
 if bg:
@@ -95,7 +84,7 @@ else:
   x.png(fnm)
   print "fnm:",fnm
   print "src:",src
-  ret = checkimage.check_result_image(fnm+'.png',src,checkimage.defaultThreshold, cleanup=not args.keep)
+  ret = regression.check_result_image(fnm+'.png',src, regression.defaultThreshold, cleanup=not args.keep)
 if args.show:
     raw_input("Press Enter")
 sys.exit(ret)
diff --git a/testing/vcs/test_vcs_box_custom_as_def_vistrails_exts.py b/testing/vcs/test_vcs_box_custom_as_def_vistrails_exts.py
index c012c09ca7aefc9f26bf1571bf57d0f628b9ac23..7d81b898171f9490cb568dce8248a91726351c92 100644
--- a/testing/vcs/test_vcs_box_custom_as_def_vistrails_exts.py
+++ b/testing/vcs/test_vcs_box_custom_as_def_vistrails_exts.py
@@ -1,30 +1,12 @@
-import vcs
-import cdms2
-import os
-import sys
-pth = os.path.join(os.path.dirname(__file__),"..")
-sys.path.append(pth)
-import checkimage
-
-f=cdms2.open(os.path.join(vcs.sample_data,"clt.nc"))
-s=f("clt",slice(0,1),squeeze=1)
-
-x=vcs.init()
-x.drawlogooff()
-x.setantialiasing(0)
-x.setbgoutputdimensions(1200,1091,units="pixels")
-
-gm=x.createboxfill()
-gm.boxfill_type="custom"
-gm.levels=[1.e20,1.e20]
-gm.ext_1="y"
-gm.ext_2="y"
-
-x.plot(s,gm,bg=1)
-fnm = "test_box_custom_as_def_vistrails_exts.png"
-src =sys.argv[1]
-x.png(fnm)
-print "fnm:",fnm
-print "src:",src
-ret = checkimage.check_result_image(fnm,src,checkimage.defaultThreshold)
-sys.exit(ret)
+import os, sys, cdms2, vcs, testing.regression as regression
+
+f = cdms2.open(os.path.join(vcs.sample_data,"clt.nc"))
+s = f("clt",slice(0,1),squeeze=1)
+x = regression.init()
+gm = x.createboxfill()
+gm.boxfill_type = "custom"
+gm.levels = [1.e20,1.e20]
+gm.ext_1 = "y"
+gm.ext_2 = "y"
+x.plot(s, gm, bg=1)
+regression.run(x, "test_box_custom_as_def_vistrails_exts.png", sys.argv[1])
\ No newline at end of file
diff --git a/testing/vcs/test_vcs_boxfill_10x10_masked_numpy.py b/testing/vcs/test_vcs_boxfill_10x10_masked_numpy.py
index b2cdf8319aa12ca23005bd1987f3a9a50f5e7f8c..2444cc8a824828490febba882f66cfbfe68e3356 100644
--- a/testing/vcs/test_vcs_boxfill_10x10_masked_numpy.py
+++ b/testing/vcs/test_vcs_boxfill_10x10_masked_numpy.py
@@ -1,23 +1,9 @@
+import vcs, numpy, os, sys, testing.regression as regression
 
-import vcs,numpy,os,sys
-src=sys.argv[1]
-pth = os.path.join(os.path.dirname(__file__),"..")
-sys.path.append(pth)
-import checkimage
+s = numpy.sin(numpy.arange(100))
+s = numpy.reshape(s,(10,10))
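+# Mask values above 0.5 so the boxfill must handle missing cells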
+s = numpy.ma.masked_greater(s,.5)
 
-s= numpy.sin(numpy.arange(100))
-s=numpy.reshape(s,(10,10))
-s=numpy.ma.masked_greater(s,.5)
-x=vcs.init()
-x.setantialiasing(0)
-x.drawlogooff()
-x.setbgoutputdimensions(1200,1091,units="pixels")
-x.plot(s,bg=1)
-fnm= "test_vcs_boxfill_10x10_masked_numpy.png"
-
-x.png(fnm)
-
-print "fnm:",fnm
-print "src:",src
-ret = checkimage.check_result_image(fnm,src,checkimage.defaultThreshold)
-sys.exit(ret)
+x = regression.init()
+x.plot(s, bg=1)
+regression.run(x, "test_vcs_boxfill_10x10_masked_numpy.png")
\ No newline at end of file
diff --git a/testing/vcs/test_vcs_boxfill_10x10_numpy.py b/testing/vcs/test_vcs_boxfill_10x10_numpy.py
index 07910519842a4a6dbe703d21865b3987a98f1d19..765917942d2638515a0e7884cd3a22200bea5457 100644
--- a/testing/vcs/test_vcs_boxfill_10x10_numpy.py
+++ b/testing/vcs/test_vcs_boxfill_10x10_numpy.py
@@ -1,22 +1,7 @@
+import vcs, numpy, os, sys, testing.regression as regression
 
-import vcs,numpy,os,sys
-src=sys.argv[1]
-pth = os.path.join(os.path.dirname(__file__),"..")
-sys.path.append(pth)
-import checkimage
-
-s= numpy.sin(numpy.arange(100))
-s=numpy.reshape(s,(10,10))
-x=vcs.init()
-x.setantialiasing(0)
-x.drawlogooff()
-x.setbgoutputdimensions(1200,1091,units="pixels")
-x.plot(s,bg=1)
-fnm= "test_vcs_boxfill_10x10_numpy.png"
-
-x.png(fnm)
-
-print "fnm:",fnm
-print "src:",src
-ret = checkimage.check_result_image(fnm,src,checkimage.defaultThreshold)
-sys.exit(ret)
+s = numpy.sin(numpy.arange(100))
+s = numpy.reshape(s,(10,10))
+x = regression.init()
+x.plot(s, bg=1)
+regression.run(x, "test_vcs_boxfill_10x10_numpy.png")
\ No newline at end of file
diff --git a/testing/vcs/test_vcs_boxfill_custom.py b/testing/vcs/test_vcs_boxfill_custom.py
index 16685d9715b881f9c9066ec2c084eee89d89e235..5330a11037c4b4956cf3e9a3074a98251edbbe69 100644
--- a/testing/vcs/test_vcs_boxfill_custom.py
+++ b/testing/vcs/test_vcs_boxfill_custom.py
@@ -1,7 +1,4 @@
-import cdms2
-import os
-import sys
-import vcs
+import cdms2, os, sys, vcs, testing.regression as regression
 
 # Load the clt data:
 dataFile = cdms2.open(os.path.join(vcs.sample_data, "clt.nc"))
@@ -10,10 +7,7 @@ clt = clt(latitude=(-90.0, 90.0), longitude=(-180., 175.), squeeze=1,
           time=('1979-1-1 0:0:0.0', '1988-12-1 0:0:0.0'))
 
 # Initialize canvas:
-canvas = vcs.init()
-canvas.setantialiasing(0)
-canvas.setbgoutputdimensions(1200,1091,units="pixels")
-canvas.drawlogooff()
+canvas = regression.init()
 
 # Create and plot quick boxfill with default settings:
 boxfill=canvas.createboxfill()
@@ -24,14 +18,5 @@ boxfill.boxfill_type = 'custom'
 canvas.plot(clt, boxfill, bg=1)
 
-# Load the image testing module:
-testingDir = os.path.join(os.path.dirname(__file__), "..")
-sys.path.append(testingDir)
-import checkimage
-
 # Create the test image and compare:
-baseline = sys.argv[1]
-testFile = "test_boxfill_custom.png"
-canvas.png(testFile)
-ret = checkimage.check_result_image(testFile, baseline,
-                                    checkimage.defaultThreshold)
-sys.exit(ret)
+regression.run(canvas, "test_boxfill_custom.png")
diff --git a/testing/vcs/test_vcs_boxfill_custom_ext1.py b/testing/vcs/test_vcs_boxfill_custom_ext1.py
index dd16ab8376b60ac29220087479b1ba63cc95aa76..7a5e2005ec0707d2efe1f1d1434c7e6ff97447b5 100644
--- a/testing/vcs/test_vcs_boxfill_custom_ext1.py
+++ b/testing/vcs/test_vcs_boxfill_custom_ext1.py
@@ -1,7 +1,4 @@
-import cdms2
-import os
-import sys
-import vcs
+import cdms2, os, sys, vcs, testing.regression as regression
 
 # Load the clt data:
 dataFile = cdms2.open(os.path.join(vcs.sample_data, "clt.nc"))
@@ -10,10 +7,7 @@ clt = clt(latitude=(-90.0, 90.0), longitude=(-180., 175.), squeeze=1,
           time=('1979-1-1 0:0:0.0', '1988-12-1 0:0:0.0'))
 
 # Initialize canvas:
-canvas = vcs.init()
-canvas.setantialiasing(0)
-canvas.setbgoutputdimensions(1200,1091,units="pixels")
-canvas.drawlogooff()
+canvas = regression.init()
 
 # Create and plot quick boxfill with default settings:
 boxfill=canvas.createboxfill()
@@ -28,14 +22,4 @@ boxfill.fillareacolors=vcs.getcolors(boxfill.levels)
 canvas.plot(clt, boxfill, bg=1)
 
-# Load the image testing module:
-testingDir = os.path.join(os.path.dirname(__file__), "..")
-sys.path.append(testingDir)
-import checkimage
-
-# Create the test image and compare:
-baseline = sys.argv[1]
-testFile = "test_boxfill_custom_ext1.png"
-canvas.png(testFile)
-ret = checkimage.check_result_image(testFile, baseline,
-                                    checkimage.defaultThreshold)
-sys.exit(ret)
+regression.run(canvas, "test_boxfill_custom_ext1.png")
\ No newline at end of file
diff --git a/testing/vcs/test_vcs_boxfill_custom_ext1_ext2.py b/testing/vcs/test_vcs_boxfill_custom_ext1_ext2.py
index 68b5a9a41dc74c696afbdae00193f3e3a7868df1..74407167163244afa4790eb3d62508e9333275a2 100644
--- a/testing/vcs/test_vcs_boxfill_custom_ext1_ext2.py
+++ b/testing/vcs/test_vcs_boxfill_custom_ext1_ext2.py
@@ -1,7 +1,4 @@
-import cdms2
-import os
-import sys
-import vcs
+import os, sys, cdms2, vcs, testing.regression as regression
 
 # Load the clt data:
 dataFile = cdms2.open(os.path.join(vcs.sample_data, "clt.nc"))
@@ -10,10 +7,7 @@ clt = clt(latitude=(-90.0, 90.0), longitude=(-180., 175.), squeeze=1,
           time=('1979-1-1 0:0:0.0', '1988-12-1 0:0:0.0'))
 
 # Initialize canvas:
-canvas = vcs.init()
-canvas.setantialiasing(0)
-canvas.setbgoutputdimensions(1200,1091,units="pixels")
-canvas.drawlogooff()
+canvas = regression.init()
 
 # Create and plot quick boxfill with default settings:
 boxfill=canvas.createboxfill()
@@ -29,14 +23,4 @@ boxfill.fillareacolors=vcs.getcolors(boxfill.levels)
 canvas.plot(clt, boxfill, bg=1)
 
-# Load the image testing module:
-testingDir = os.path.join(os.path.dirname(__file__), "..")
-sys.path.append(testingDir)
-import checkimage
-
-# Create the test image and compare:
-baseline = sys.argv[1]
-testFile = "test_boxfill_custom_ext1_ext2.png"
-canvas.png(testFile)
-ret = checkimage.check_result_image(testFile, baseline,
-                                    checkimage.defaultThreshold)
-sys.exit(ret)
+regression.run(canvas, "test_boxfill_custom_ext1_ext2.png")
diff --git a/testing/vcs/test_vcs_boxfill_custom_ext2.py b/testing/vcs/test_vcs_boxfill_custom_ext2.py
index 959fc2c6c04307817d9f64a2bae284e0298966b0..d45950bdbbd3f0d3fe8c606fa77051779ea8fb65 100644
--- a/testing/vcs/test_vcs_boxfill_custom_ext2.py
+++ b/testing/vcs/test_vcs_boxfill_custom_ext2.py
@@ -3,6 +3,8 @@ import os
 import sys
 import vcs
 
+import testing.regression as regression
+
 # Load the clt data:
 dataFile = cdms2.open(os.path.join(vcs.sample_data, "clt.nc"))
 clt = dataFile("clt")
@@ -10,10 +12,7 @@ clt = clt(latitude=(-90.0, 90.0), longitude=(-180., 175.), squeeze=1,
           time=('1979-1-1 0:0:0.0', '1988-12-1 0:0:0.0'))
 
 # Initialize canvas:
-canvas = vcs.init()
-canvas.setantialiasing(0)
-canvas.setbgoutputdimensions(1200,1091,units="pixels")
-canvas.drawlogooff()
+canvas = regression.init()
 
 # Create and plot quick boxfill with default settings:
 boxfill=canvas.createboxfill()
@@ -27,15 +26,4 @@ boxfill.fillareacolors=vcs.getcolors(boxfill.levels)
 
 canvas.plot(clt, boxfill, bg=1)
 
-# Load the image testing module:
-testingDir = os.path.join(os.path.dirname(__file__), "..")
-sys.path.append(testingDir)
-import checkimage
-
-# Create the test image and compare:
-baseline = sys.argv[1]
-testFile = "test_boxfill_custom_ext2.png"
-canvas.png(testFile)
-ret = checkimage.check_result_image(testFile, baseline,
-                                    checkimage.defaultThreshold)
-sys.exit(ret)
+regression.run(canvas, "test_boxfill_custom_ext2.png")
\ No newline at end of file
diff --git a/testing/vcs/test_vcs_boxfill_custom_non_default_levels.py b/testing/vcs/test_vcs_boxfill_custom_non_default_levels.py
index 7363d22508420a31aa96728a8d3e4f18acb158ac..b84db2bb52f77af46460f6c446146ad2e988658d 100644
--- a/testing/vcs/test_vcs_boxfill_custom_non_default_levels.py
+++ b/testing/vcs/test_vcs_boxfill_custom_non_default_levels.py
@@ -1,7 +1,4 @@
-import cdms2
-import os
-import sys
-import vcs
+import cdms2, os, sys, vcs, testing.regression as regression
 
 # Load the clt data:
 dataFile = cdms2.open(os.path.join(vcs.sample_data, "clt.nc"))
@@ -10,10 +7,7 @@ clt = clt(latitude=(-90.0, 90.0), longitude=(-180., 175.), squeeze=1,
           time=('1979-1-1 0:0:0.0', '1988-12-1 0:0:0.0'))
 
 # Initialize canvas:
-canvas = vcs.init()
-canvas.setantialiasing(0)
-canvas.setbgoutputdimensions(1200,1091,units="pixels")
-canvas.drawlogooff()
+canvas = regression.init()
 
 # Create and plot quick boxfill with default settings:
 boxfill=canvas.createboxfill()
@@ -25,16 +19,4 @@ boxfill.levels=levels
 boxfill.fillareacolors=vcs.getcolors(levels)
 
 canvas.plot(clt, boxfill, bg=1)
-
-# Load the image testing module:
-testingDir = os.path.join(os.path.dirname(__file__), "..")
-sys.path.append(testingDir)
-import checkimage
-
-# Create the test image and compare:
-baseline = sys.argv[1]
-testFile = "test_boxfill_custom_no_default_levels.png"
-canvas.png(testFile)
-ret = checkimage.check_result_image(testFile, baseline,
-                                    checkimage.defaultThreshold)
-sys.exit(ret)
+regression.run(canvas, "test_boxfill_custom_no_default_levels.png")
diff --git a/testing/vcs/test_vcs_boxfill_datawc_time.py b/testing/vcs/test_vcs_boxfill_datawc_time.py
new file mode 100644
index 0000000000000000000000000000000000000000..3b459b7e7424e4278cd197207109e82d748ff9b0
--- /dev/null
+++ b/testing/vcs/test_vcs_boxfill_datawc_time.py
@@ -0,0 +1,23 @@
+import cdms2, os, sys, vcs, cdtime, testing.regression as regression
+
+# Test that we can restrict the plot using datawc along a time axis
+dataFile = cdms2.open(os.path.join(vcs.sample_data, "clt.nc"))
+clt = dataFile("clt")
+clt = clt(latitude=(-90.0, 90.0), longitude=(0.), squeeze=1,
+          time=('1979-1-1 0:0:0.0', '1988-12-1 0:0:0.0'))
+
+# Initialize canvas:
+canvas = regression.init()
+
+# Create and plot quick boxfill with default settings:
+boxfill=canvas.createboxfill()
+
+# Change the type
+boxfill.boxfill_type = 'custom'
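+# With time on the y axis of this plot, datawc_y1 = 12 clips the plotted time range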
+boxfill.datawc_y1 = 12
+
+canvas.plot(clt, boxfill, bg=1)
+
+# Create the test image and compare:
+regression.run(canvas, "test_vcs_boxfill_datawc_time.png")
diff --git a/testing/vcs/test_vcs_boxfill_decreasing_latitude.py b/testing/vcs/test_vcs_boxfill_decreasing_latitude.py
index 009b947a9e67b5536baf38d88b9346b3ab5e5387..6cbc0f0163099434d91848c18ad3283d5b941082 100755
--- a/testing/vcs/test_vcs_boxfill_decreasing_latitude.py
+++ b/testing/vcs/test_vcs_boxfill_decreasing_latitude.py
@@ -1,23 +1,10 @@
 #!/usr/bin/env python
-import cdms2
-import cdutil
-import os
-import sys
-import vcs
-
-pth = os.path.join(os.path.dirname(__file__), "..")
-sys.path.append(pth)
-import checkimage
+import cdms2, cdutil, os, sys, vcs, testing.regression as regression
 
 f = cdms2.open(sys.argv[2])
 ice = f("variable_6")
-x = vcs.init()
-x.setantialiasing(0)
-x.drawlogooff()
-x.setbgoutputdimensions(1200, 900, units="pixels")
+x = regression.init()
 
-#gm = x.createisofill()
-#gm.label = "y"
 gm = x.createboxfill()
 gm.boxfill_type = "custom"
 
@@ -44,12 +31,8 @@ tmpl.legend.y1 = .03
 tmpl.legend.y2 = .055
 tmpl.max.priority = 1
 
-#tmpl.crdate.priority=1
-#tmpl.crdate.x=.8
-#tmpl.crdate.y=.95
 txt = x.createtext()
 txt.height = 20
-#txt.color=242
 txt.valign = "half"
 txt.halign = "center"
 
@@ -68,9 +51,5 @@ gm.fillareacolors = cols
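+# Note the deliberately decreasing latitude range (datawc_y1=90 down to datawc_y2=30)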
 gm.datawc_y2 = 30
 gm.datawc_y1 = 90
 
-x.plot(ice, gm, tmpl, bg = 1)
-fnm = "test_boxfill_decreasing_latitude.png"
-x.png(fnm)
-ret = checkimage.check_result_image(fnm, sys.argv[1], checkimage.defaultThreshold)
-sys.exit(ret)
-
+x.plot(ice, gm, tmpl, bg=1)
+regression.run(x, "test_boxfill_decreasing_latitude.png")
\ No newline at end of file
diff --git a/testing/vcs/test_vcs_boxfill_lambert_crash.py b/testing/vcs/test_vcs_boxfill_lambert_crash.py
index f1827882f1203ea684d07ed9ec4752be5712ceda..f445ba5551b64e7c3ae648f9132a05cff8f117d9 100644
--- a/testing/vcs/test_vcs_boxfill_lambert_crash.py
+++ b/testing/vcs/test_vcs_boxfill_lambert_crash.py
@@ -1,17 +1,10 @@
 #!/usr/bin/env python
-import cdms2
-import os
-import sys
-import vcs
-
-pth = os.path.join(os.path.dirname(__file__), "..")
-sys.path.append(pth)
-import checkimage
+import cdms2, os, sys, vcs, testing.regression as regression
 
 f = cdms2.open(sys.argv[2])
-a=f("Z3")
+a = f("Z3")
 
-x=vcs.init()
+x = regression.init()
 x.setantialiasing(0)
 x.drawlogooff()
 x.setbgoutputdimensions(1200, 900, units="pixels")
@@ -24,6 +17,4 @@ x.plot(a(latitude=(20,60),longitude=(-160,-120)),b, bg=1)
 fileName = os.path.basename(__file__)
 fileName = os.path.splitext(fileName)[0]
 fileName += '.png'
-x.png(fileName)
-ret = checkimage.check_result_image(fileName, sys.argv[1], checkimage.defaultThreshold)
-sys.exit(ret)
+ret = regression.run(x, fileName)
diff --git a/testing/vcs/test_vcs_boxfill_lev1_lev2.py b/testing/vcs/test_vcs_boxfill_lev1_lev2.py
index 5c69d7af3d4ea82c3bb6c5681dec17108230608b..6c1d985d7d09d26d64672912efd20e22767f3573 100644
--- a/testing/vcs/test_vcs_boxfill_lev1_lev2.py
+++ b/testing/vcs/test_vcs_boxfill_lev1_lev2.py
@@ -1,25 +1,10 @@
-
-import cdms2,sys,vcs,sys,os
-src=sys.argv[1]
-pth = os.path.join(os.path.dirname(__file__),"..")
-sys.path.append(pth)
-import checkimage
-x=vcs.init()
-x.setantialiasing(0)
-x.drawlogooff()
-f=cdms2.open(vcs.sample_data+"/clt.nc")
-s=f("clt",slice(0,1),squeeze=1)
-b=x.createboxfill()
-b.level_1=.5
-b.level_2=14.5
-x.plot(s,b,bg=1)
-
-fnm= "test_boxfill_lev1_lev2.png"
-
-x.png(fnm)
-
-print "fnm:",fnm
-print "src:",src
-ret = checkimage.check_result_image(fnm,src,checkimage.defaultThreshold)
-sys.exit(ret)
-
+import os, sys, cdms2, vcs, testing.regression as regression
+
+x = regression.init()
+f = cdms2.open(vcs.sample_data+"/clt.nc")
+s = f("clt",slice(0,1),squeeze=1)
+b = x.createboxfill()
+b.level_1 = .5
+b.level_2 = 14.5
+x.plot(s, b, bg=1)
+regression.run(x, "test_vcs_boxfill_lev1_lev2.png")
diff --git a/testing/vcs/test_vcs_boxfill_lev1_lev2_ext1.py b/testing/vcs/test_vcs_boxfill_lev1_lev2_ext1.py
index 594949238043b7f2c8dd77471f4713ecf0dec349..67f87029e9acc16cc0edf1f1775802f7a160e5a3 100644
--- a/testing/vcs/test_vcs_boxfill_lev1_lev2_ext1.py
+++ b/testing/vcs/test_vcs_boxfill_lev1_lev2_ext1.py
@@ -1,26 +1,11 @@
-
-import cdms2,sys,vcs,sys,os
-src=sys.argv[1]
-pth = os.path.join(os.path.dirname(__file__),"..")
-sys.path.append(pth)
-import checkimage
-x=vcs.init()
-x.setantialiasing(0)
-x.drawlogooff()
-f=cdms2.open(vcs.sample_data+"/clt.nc")
-s=f("clt",slice(0,1),squeeze=1)
-b=x.createboxfill()
-b.level_1=20
-b.level_2=80
-b.ext_1="y"
-x.plot(s,b,bg=1)
-
-fnm= "test_boxfill_lev1_lev2_ext1.png"
-
-x.png(fnm)
-
-print "fnm:",fnm
-print "src:",src
-ret = checkimage.check_result_image(fnm,src,checkimage.defaultThreshold)
-sys.exit(ret)
-
+import os, sys, cdms2, vcs, testing.regression as regression
+
+x = regression.init()
+f = cdms2.open(vcs.sample_data+"/clt.nc")
+s = f("clt",slice(0,1),squeeze=1)
+b = x.createboxfill()
+b.level_1 = 20
+b.level_2 = 80
+b.ext_1 = "y"
+x.plot(s, b, bg=1)
+regression.run(x, "test_boxfill_lev1_lev2_ext1.png")
\ No newline at end of file
diff --git a/testing/vcs/test_vcs_boxfill_lev1_lev2_ext1_ext2.py b/testing/vcs/test_vcs_boxfill_lev1_lev2_ext1_ext2.py
index 9e355d1e18518afb127a2273566eeecc75f32f62..dc7958c596d9b9a9d852948709c5ac0a702b6e51 100644
--- a/testing/vcs/test_vcs_boxfill_lev1_lev2_ext1_ext2.py
+++ b/testing/vcs/test_vcs_boxfill_lev1_lev2_ext1_ext2.py
@@ -1,27 +1,12 @@
-
-import cdms2,sys,vcs,sys,os
-src=sys.argv[1]
-pth = os.path.join(os.path.dirname(__file__),"..")
-sys.path.append(pth)
-import checkimage
-x=vcs.init()
-x.setantialiasing(0)
-x.drawlogooff()
-f=cdms2.open(vcs.sample_data+"/clt.nc")
-s=f("clt",slice(0,1),squeeze=1)
-b=x.createboxfill()
-b.level_1=20
-b.level_2=80
-b.ext_1="y"
-b.ext_2="y"
-x.plot(s,b,bg=1)
-
-fnm= "test_boxfill_lev1_lev2_ext1_ext2.png"
-
-x.png(fnm)
-
-print "fnm:",fnm
-print "src:",src
-ret = checkimage.check_result_image(fnm,src,checkimage.defaultThreshold)
-sys.exit(ret)
-
+import os, sys, cdms2, vcs, testing.regression as regression
+
+x = regression.init()
+f = cdms2.open(vcs.sample_data+"/clt.nc")
+s = f("clt", slice(0,1), squeeze=1)
+b = x.createboxfill()
+b.level_1 = 20
+b.level_2 = 80
+b.ext_1 = "y"
+b.ext_2 = "y"
+x.plot(s, b, bg=1)
+regression.run(x, "test_boxfill_lev1_lev2_ext1_ext2.png")
\ No newline at end of file
diff --git a/testing/vcs/test_vcs_boxfill_lev1_lev2_ext2.py b/testing/vcs/test_vcs_boxfill_lev1_lev2_ext2.py
index 375c93d8aac93d2ee545f9715c8fd293cdc3ffd6..398325eab83f0f4d398c585fda47af1728439952 100644
--- a/testing/vcs/test_vcs_boxfill_lev1_lev2_ext2.py
+++ b/testing/vcs/test_vcs_boxfill_lev1_lev2_ext2.py
@@ -1,26 +1,11 @@
-
-import cdms2,sys,vcs,sys,os
-src=sys.argv[1]
-pth = os.path.join(os.path.dirname(__file__),"..")
-sys.path.append(pth)
-import checkimage
-x=vcs.init()
-x.setantialiasing(0)
-x.drawlogooff()
-f=cdms2.open(vcs.sample_data+"/clt.nc")
-s=f("clt",slice(0,1),squeeze=1)
-b=x.createboxfill()
-b.level_1=20
-b.level_2=80
-b.ext_2="y"
-x.plot(s,b,bg=1)
-
-fnm= "test_boxfill_lev1_lev2_ext2.png"
-
-x.png(fnm)
-
-print "fnm:",fnm
-print "src:",src
-ret = checkimage.check_result_image(fnm,src,checkimage.defaultThreshold)
-sys.exit(ret)
-
+import os, sys, cdms2, vcs, testing.regression as regression
+
+x = regression.init()
+f = cdms2.open(vcs.sample_data+"/clt.nc")
+s = f("clt",slice(0,1),squeeze=1)
+b = x.createboxfill()
+b.level_1 = 20
+b.level_2 = 80
+b.ext_2 = "y"
+x.plot(s, b, bg=1)
+regression.run(x, "test_boxfill_lev1_lev2_ext2.png")
\ No newline at end of file
diff --git a/testing/vcs/test_vcs_boxfill_lev1_lev2_ta_missing.py b/testing/vcs/test_vcs_boxfill_lev1_lev2_ta_missing.py
index 984179e5ce0a0119433196c354bfebf330557f96..d2a39a1ba2364f5bda55da49d18040306a95e4b2 100644
--- a/testing/vcs/test_vcs_boxfill_lev1_lev2_ta_missing.py
+++ b/testing/vcs/test_vcs_boxfill_lev1_lev2_ta_missing.py
@@ -1,28 +1,11 @@
-
-import cdms2,sys,vcs,sys,os
-src=sys.argv[1]
-pth = os.path.join(os.path.dirname(__file__),"..")
-sys.path.append(pth)
-import checkimage
-x=vcs.init()
-x.setantialiasing(0)
-x.drawlogooff()
-x.setbgoutputdimensions(1200,1091,units="pixels")
-f=cdms2.open(vcs.sample_data+"/ta_ncep_87-6-88-4.nc")
-s=f("ta",slice(0,1),longitude=slice(34,35),squeeze=1)-273.15
-s=cdms2.MV2.masked_less(s,-45.)
-b=x.createboxfill()
-b.level_1=-40
-b.level_2=40
-x.plot(s,b,bg=1)
-
-fnm= "test_boxfill_lev1_lev2_ta_missing.png"
-
-x.png(fnm)
-
-print "fnm:",fnm
-print "src:",src
-ret = checkimage.check_result_image(fnm,src,checkimage.defaultThreshold)
-sys.exit(ret)
-raw_input()
-
+import os, sys, cdms2, vcs, testing.regression as regression
+
+x = regression.init()
+f = cdms2.open(vcs.sample_data+"/ta_ncep_87-6-88-4.nc")
+s = f("ta",slice(0,1),longitude=slice(34,35),squeeze=1)-273.15
+s = cdms2.MV2.masked_less(s,-45.)
+b = x.createboxfill()
+b.level_1 = -40
+b.level_2 = 40
+x.plot(s, b, bg=1)
+regression.run(x, "test_boxfill_lev1_lev2_ta_missing.png")
\ No newline at end of file
diff --git a/testing/vcs/test_vcs_boxfill_orthographic.py b/testing/vcs/test_vcs_boxfill_orthographic.py
new file mode 100644
index 0000000000000000000000000000000000000000..b0ebbb3a0daa12ff598de8d47421b4532cb1c04c
--- /dev/null
+++ b/testing/vcs/test_vcs_boxfill_orthographic.py
@@ -0,0 +1,21 @@
+import os, sys, cdms2, vcs, testing.regression as regression
+
+baselineName = sys.argv[1]
+centerlatitude = float(sys.argv[2])
+
+
+f = cdms2.open(vcs.sample_data + "/clt.nc")
+a = f("clt")
+
+x = regression.init()
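+# Center the orthographic view on the latitude passed as the second argument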
+p = x.getprojection('orthographic')
+p.centerlatitude = centerlatitude
+b = x.createboxfill()
+b.projection = p
+x.plot(a(latitude=(90,-90)), b, bg=1)
+
+fileName = os.path.basename(baselineName)
+fileName = os.path.splitext(fileName)[0]
+fileName += '.png'
+
+regression.run(x, fileName)
diff --git a/testing/vcs/test_vcs_boxfill_polar.py b/testing/vcs/test_vcs_boxfill_polar.py
deleted file mode 100644
index e4f534c9dc733ec4102e8683a1c68b1fbaf486af..0000000000000000000000000000000000000000
--- a/testing/vcs/test_vcs_boxfill_polar.py
+++ /dev/null
@@ -1,33 +0,0 @@
-#!/usr/bin/env python
-import cdms2
-import os
-import sys
-import vcs
-
-pth = os.path.join(os.path.dirname(__file__), "..")
-sys.path.append(pth)
-import checkimage
-
-f = cdms2.open(vcs.sample_data + "/clt.nc")
-a=f("clt")
-
-x=vcs.init()
-x.setantialiasing(0)
-x.drawlogooff()
-x.setbgoutputdimensions(1200, 900, units="pixels")
-
-p=x.getprojection("polar")
-b=x.createboxfill()
-b.projection=p
-#b.datawc_y1 = 90
-#b.datawc_y2 = -90
-
-x.setbgoutputdimensions(1200,1091,units="pixels")
-x.plot(a(latitude=(90,-90)), b, bg=1)
-
-fileName = os.path.basename(__file__)
-fileName = os.path.splitext(fileName)[0]
-fileName += '.png'
-x.png(fileName)
-ret = checkimage.check_result_image(fileName, sys.argv[1], checkimage.defaultThreshold)
-sys.exit(ret)
diff --git a/testing/vcs/test_vcs_boxfill_projection.py b/testing/vcs/test_vcs_boxfill_projection.py
new file mode 100644
index 0000000000000000000000000000000000000000..6f319efd4bda22e781ada890a008fd9aa0cc619f
--- /dev/null
+++ b/testing/vcs/test_vcs_boxfill_projection.py
@@ -0,0 +1,20 @@
+import os, sys, cdms2, vcs, testing.regression as regression
+
+baselineName = sys.argv[1]
+projection = sys.argv[2]
+
+
+f = cdms2.open(vcs.sample_data + "/clt.nc")
+a = f("clt")
+
+x = regression.init()
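+# The projection under test is chosen at run time; the baseline name is derived from it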
+p = x.getprojection(projection)
+b = x.createboxfill()
+b.projection = p
+x.plot(a(latitude=(90,-90)), b, bg=1)
+
+fileName = os.path.basename(baselineName)
+fileName = os.path.splitext(fileName)[0]
+fileName += '.png'
+
+regression.run(x, fileName)
diff --git a/testing/vcs/test_vcs_boxfill_robinson_wrap.py b/testing/vcs/test_vcs_boxfill_robinson_wrap.py
index 81b3206bdfe0d24c720daabb16ae3f1967196304..da6cbb54a964f79057ad2531515c0c16b93e2935 100644
--- a/testing/vcs/test_vcs_boxfill_robinson_wrap.py
+++ b/testing/vcs/test_vcs_boxfill_robinson_wrap.py
@@ -1,15 +1,7 @@
-#!/usr/bin/env python
-import cdms2, cdutil, genutil
-import vcs,os
-import sys
+import os, sys, cdms2, cdutil, genutil, vcs, testing.regression as regression
 
 # This tests if extending the longitude to more than 360 degrees is handled correctly by
 # proj4. See https://github.com/UV-CDAT/uvcdat/issues/1728 for more information.
-pth = os.path.join(os.path.dirname(__file__), "..")
-sys.path.append(pth)
-import checkimage
-
-
 cdmsfile = cdms2.open(os.path.join(vcs.sample_data, "clt.nc"))
 clt2 = cdmsfile('clt')
 clt3 = clt2(latitude=(-90.0, 90.0),squeeze=1,longitude=(-180, 200.0),time=('1979-01', '1988-12'),)
@@ -19,7 +11,4 @@ kwargs = {}
 kwargs[ 'cdmsfile' ] = cdmsfile.id
 kwargs['bg'] = 1
 canvas.plot(clt3, gmBoxfill, **kwargs)
-fnm = "test_robinson_wrap.png"
-canvas.png(fnm)
-ret = checkimage.check_result_image(fnm, sys.argv[1], checkimage.defaultThreshold)
-sys.exit(ret)
+regression.run(canvas, "test_vcs_robinson_wrap.png")
diff --git a/testing/vcs/test_vcs_canvas_background.py b/testing/vcs/test_vcs_canvas_background.py
index 1d39b330d26fa37397aa5251ffcd1a91ff1d589b..2c72b51f3d4b8048d919982971193c2159d1abc4 100644
--- a/testing/vcs/test_vcs_canvas_background.py
+++ b/testing/vcs/test_vcs_canvas_background.py
@@ -1,19 +1,6 @@
-import vcs, cdms2, os, sys
-
-pth = os.path.join(os.path.dirname(__file__),"..")
-sys.path.append(pth)
-import checkimage
-
-x = vcs.init()
-x.setantialiasing(0)
-x.drawlogooff()
-x.setbgoutputdimensions(500,500,units="pixels")
+import os, sys, cdms2, vcs, testing.regression as regression
 
+x = regression.init()
 x.backgroundcolor = (255, 255, 255)
 x.open()
-fnm = "test_backgroundcolor_white.png"
-x.png(fnm)
-
-src=sys.argv[1]
-ret = checkimage.check_result_image(fnm, src, checkimage.defaultThreshold)
-sys.exit(ret)
+regression.run(x, "test_backgroundcolor_white.png")
\ No newline at end of file
diff --git a/testing/vcs/test_vcs_canvas_background_update.py b/testing/vcs/test_vcs_canvas_background_update.py
index 454f0ef0993fc6d1c1b77ffb34bcabe239f0b5a5..80f79d2aa9f70da67888f39b5a44c397196b1b6b 100644
--- a/testing/vcs/test_vcs_canvas_background_update.py
+++ b/testing/vcs/test_vcs_canvas_background_update.py
@@ -1,22 +1,8 @@
-import vcs, cdms2, os, sys
-
-pth = os.path.join(os.path.dirname(__file__),"..")
-sys.path.append(pth)
-import checkimage
-
-x = vcs.init()
-
-x.drawlogooff()
-x.setbgoutputdimensions(500,500, units="pixels")
+import os, sys, cdms2, vcs, testing.regression as regression
 
+x = regression.init()
 x.backgroundcolor = (255, 255, 255)
 x.open()
 x.backgroundcolor = (255, 255, 0)
 x.update()
-fnm = "test_backgroundcolor_yellow.png"
-x.png(fnm)
-
-src=sys.argv[1]
-ret = checkimage.check_result_image(fnm, src, checkimage.defaultThreshold)
-
-sys.exit(ret)
+regression.run(x, "test_backgroundcolor_yellow.png")
\ No newline at end of file
diff --git a/testing/vcs/test_vcs_click_info.py b/testing/vcs/test_vcs_click_info.py
index 0fe587792bfd0832a90bd76c1a52d3065c3e1a9a..8d55e77c6501fb37dc06e653bb6fa7c1294c6a0d 100644
--- a/testing/vcs/test_vcs_click_info.py
+++ b/testing/vcs/test_vcs_click_info.py
@@ -1,41 +1,44 @@
-import cdms2
-import sys
-import vcs
-import os
+import os, sys, cdms2, vcs, testing.regression as regression
 
 testConfig = {'a_boxfill': ('clt.nc', 'clt', (200, 200)),
               'a_mollweide_boxfill': ('clt.nc', 'clt', (222, 322)),
+              'a_isofill': ('clt.nc', 'clt', (200, 200)),
+              'a_isoline': ('clt.nc', 'clt', (200, 200)),
+              'vector_default': ('clt.nc', ('u', 'v'), (200, 200)),
               'a_meshfill': ('sampleCurveGrid4.nc', 'sample', (222, 322)),
               'a_robinson_meshfill': ('sampleCurveGrid4.nc', 'sample', (222, 322))}
 
 # Tests if the info produced when clicking on a map is correct.
 src = sys.argv[1]
 plot = sys.argv[2]
-pth = os.path.join(os.path.dirname(__file__), "..")
-sys.path.append(pth)
-import checkimage
-x = vcs.init()
-x.setantialiasing(0)
-x.drawlogooff()
-# Needs to set the size of window so it is consistent accross
-# test platforms
-x.open(814, 606)
-
-# data
-f = cdms2.open(vcs.sample_data + "/" + testConfig[plot][0])
-s = f(testConfig[plot][1])
+x = regression.init(bg=False, geometry=(800, 600))
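+# Vector plots take two variables (u and v), so flag them for special handling below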
 
+vector = False
 # graphics method
 if (plot.find('boxfill') != -1):
     gm = x.getboxfill(plot)
 elif (plot.find('meshfill') != -1):
     gm = x.getmeshfill(plot)
+elif (plot.find('isofill') != -1):
+    gm = x.getisofill(plot)
+elif (plot.find('isoline') != -1):
+    gm = x.getisoline(plot)
+elif (plot.find('vector') != -1):
+    gm = x.getvector(plot[plot.index('_') + 1:])
+    vector = True
 else:
     print "Invalid plot"
     sys.exit(13)
 
-# Has to plot in foreground to simulate a click
-x.plot(s, gm)
+# data
+f = cdms2.open(vcs.sample_data + "/" + testConfig[plot][0])
+if (vector):
+    u = f(testConfig[plot][1][0])
+    v = f(testConfig[plot][1][1])
+    x.plot(u, v, gm)
+else:
+    s = f(testConfig[plot][1])
+    x.plot(s, gm)
 
 # Simulate a click -- VTK Specific
 location = testConfig[plot][2]
@@ -47,7 +50,4 @@ fileName = os.path.basename(src)
 fileName = os.path.splitext(fileName)[0]
 fileName += '.png'
 
-x.png(fileName, width=814, height= 606)
-
-ret = checkimage.check_result_image(fileName, src, checkimage.defaultThreshold)
-sys.exit(ret)
+regression.run(x, fileName)
\ No newline at end of file
diff --git a/testing/vcs/test_vcs_close.py b/testing/vcs/test_vcs_close.py
index 7bf00722497445219bf28ee8ba4981e2c9f7e438..3457a648ea2c22aa437845104391c2a424c1244a 100644
--- a/testing/vcs/test_vcs_close.py
+++ b/testing/vcs/test_vcs_close.py
@@ -1,7 +1,5 @@
 import os, sys, vcs, cdms2
-#import checkimage
 
-#src=sys.argv[1]
 pth = os.path.join(os.path.dirname(__file__),"..")
 sys.path.append(pth)
 cdmsfile = cdms2.open(vcs.sample_data+"/clt.nc")
@@ -9,8 +7,4 @@ data = cdmsfile('clt')
 x = vcs.init()
 x.plot(data, bg=1)
 x.close()
-#x.plot(data[4][1:89], bg=1)
-#fnm = "test_vcs_close.png"
-#x.png(fnm)
-#ret = checkimage.check_result_image(fnm, src, checkimage.defaultThreshold)
 sys.exit(0)
diff --git a/testing/vcs/test_vcs_colormaps_source.py b/testing/vcs/test_vcs_colormaps_source.py
index 1dcc7d8807f99c751e29e0ad9145e5dc5b3758cc..2cfc027217b17a03f8bc23b474b222f80c3b1056 100644
--- a/testing/vcs/test_vcs_colormaps_source.py
+++ b/testing/vcs/test_vcs_colormaps_source.py
@@ -1,27 +1,12 @@
-import vcs
-import argparse
-import cdms2
-import  os
-import sys
-
-
-pth = os.path.join(os.path.dirname(__file__),"..")
-sys.path.append(pth)
-import checkimage
+import argparse, os, sys, cdms2, vcs, testing.regression as regression
 
 parser = argparse.ArgumentParser()
-
 parser.add_argument("-g",dest="gm",default="boxfill",choices = ["boxfill","isofill","meshfill","isoline","vector","1d"])
 parser.add_argument("-s",dest="src",default="vcs",choices=["vcs","canvas","gm"])
 parser.add_argument("-b",dest="baseline")
-
-
 args = parser.parse_args()
 
-x=vcs.init()
-x.setantialiasing(0)
-x.setbgoutputdimensions(1200, 1091, units="pixels")
-x.drawlogooff()
+x = regression.init()
 
 exec("gm = x.create%s()" % args.gm)
 
@@ -55,7 +40,4 @@ else:
 fnm = "test_vcs_colormaps_source_%s_%s.png" % (args.gm,args.src)
 x.png(fnm)
 baselineImage = args.baseline
-ret = checkimage.check_result_image(fnm, baselineImage,
-                                    checkimage.defaultThreshold)
-
-sys.exit(ret)
+ret = regression.run(x, fnm, baselineImage)
\ No newline at end of file
diff --git a/testing/vcs/test_vcs_colorpicker_appearance.py b/testing/vcs/test_vcs_colorpicker_appearance.py
index c925340711465ac60dc4ad92644ac33878b0095c..4ccba61fd2747ca23eb4ccd6eafdb94ad67a482e 100644
--- a/testing/vcs/test_vcs_colorpicker_appearance.py
+++ b/testing/vcs/test_vcs_colorpicker_appearance.py
@@ -1,7 +1,6 @@
 import vcs, vtk
 
 picker = vcs.colorpicker.ColorPicker(500, 250, None, 0)
-
 win = picker.render_window
 
 win.Render()
@@ -16,8 +15,6 @@ png_writer.Write()
 
 import sys, os
 if len(sys.argv) > 1:
-    src = sys.argv[1]
-    sys.path.append(os.path.join(os.path.dirname(__file__), ".."))
-    import checkimage
-    ret = checkimage.check_result_image(fnm, src, checkimage.defaultThreshold)
+    import testing.regression as regression
+    ret = regression.check_result_image(fnm, sys.argv[1])
     sys.exit(ret)
diff --git a/testing/vcs/test_vcs_configurator_resize.py b/testing/vcs/test_vcs_configurator_resize.py
index b6179626d422de52f577790291fb1d434078d59c..bd3490afbdd8280b66b1a69231dc360624e7a51e 100644
--- a/testing/vcs/test_vcs_configurator_resize.py
+++ b/testing/vcs/test_vcs_configurator_resize.py
@@ -4,15 +4,16 @@ x = vcs.init()
 x.open()
 x.configure()
 
-x.backend.renWin.SetSize(814, 303)
-
 fnm = "test_vcs_configurator_resize.png"
 
 win = x.backend.renWin
-win.Render()
+win.SetSize(814, 303)
+
 out_filter = vtk.vtkWindowToImageFilter()
 out_filter.SetInput(win)
 
+win.Render()
+
 png_writer = vtk.vtkPNGWriter()
 png_writer.SetFileName(fnm)
 png_writer.SetInputConnection(out_filter.GetOutputPort())
@@ -20,9 +21,7 @@ png_writer.Write()
 
 import sys, os
 if len(sys.argv) > 1:
-    pth = os.path.join(os.path.dirname(__file__), "..")
-    sys.path.append(pth)
-    import checkimage
+    import testing.regression as regression
     src = sys.argv[1]
-    ret = checkimage.check_result_image(fnm, src, checkimage.defaultThreshold)
-    sys.exit(ret)
+    ret = regression.check_result_image(fnm, src)
+    sys.exit(ret)
\ No newline at end of file
diff --git a/testing/vcs/test_vcs_continents.py b/testing/vcs/test_vcs_continents.py
index 9ae3d62de208e3058801b73c6a94fbbfcdef161e..c102df9de47557d2e523979d1bb77744b5c6d54c 100644
--- a/testing/vcs/test_vcs_continents.py
+++ b/testing/vcs/test_vcs_continents.py
@@ -1,22 +1,14 @@
-import cdms2
-import os
-import sys
-import vcs
-import EzTemplate
+import os, sys, EzTemplate, cdms2, vcs, testing.regression as regression
 
 # Load the clt data:
 dataFile = cdms2.open(os.path.join(vcs.sample_data, "clt.nc"))
 clt = dataFile("clt", time="1979-1-1", squeeze=1)
 
-
 # Zero out the array so we can see the continents clearly
 clt[:] = 0
 
 # Initialize canvas:
-canvas = vcs.init()
-canvas.setantialiasing(0)
-canvas.setbgoutputdimensions(1200, 1091, units="pixels")
-canvas.drawlogooff()
+canvas = regression.init()
 
 # Create and plot quick boxfill with default settings:
 boxfill = canvas.createboxfill()
@@ -64,15 +56,4 @@ for i in range(12):
         canvas.plot(clt, template, boxfill, continents=7, continents_line=cont_line, bg=1)
         os.environ["UVCDAT_DIR"] = current_dotdir
 
-# Load the image testing module:
-testingDir = os.path.join(os.path.dirname(__file__), "..")
-sys.path.append(testingDir)
-import checkimage
-
-# Create the test image and compare:
-baseline = sys.argv[1]
-testFile = "test_continents.png"
-canvas.png(testFile)
-ret = checkimage.check_result_image(testFile, baseline,
-                                    25)
-sys.exit(ret)
+regression.run(canvas, "test_continents.png")
\ No newline at end of file
diff --git a/testing/vcs/test_vcs_create_get.py b/testing/vcs/test_vcs_create_get.py
index ec525d1b4d72a56d750e8dd8e69e3648daea4811..adb879d64013d1882f21186439f805ffc0d220d6 100644
--- a/testing/vcs/test_vcs_create_get.py
+++ b/testing/vcs/test_vcs_create_get.py
@@ -1,4 +1,3 @@
-
 import vcs
 x=vcs.init()
 x.drawlogooff()
diff --git a/testing/vcs/test_vcs_draw_logo_on.py b/testing/vcs/test_vcs_draw_logo_on.py
index 65271eb15103b46aa4bdb53823a0aea40477e45e..4a0c28d2cdd93d8fe057eab8fe4b7bfa4e6cd1ed 100644
--- a/testing/vcs/test_vcs_draw_logo_on.py
+++ b/testing/vcs/test_vcs_draw_logo_on.py
@@ -1,19 +1,9 @@
-import vcs,numpy,cdms2,MV2,os,sys
-src=sys.argv[1]
-pth = os.path.join(os.path.dirname(__file__),"..")
-sys.path.append(pth)
-import checkimage
+import vcs, numpy, cdms2, MV2, os, sys, testing.regression as regression
 
-x=vcs.init()
-x.setantialiasing(0)
-x.setbgoutputdimensions(1200,1091,units="pixels")
+x = vcs.init()
 a=numpy.arange(100)
 a.shape=(10,10)
 x.plot(a,bg=1)
 fnm = "test_vcs_draw_logo_on.png"
 x.png(fnm)
-
-print "fnm:",fnm
-print "src:",src
-ret = checkimage.check_result_image(fnm,src,checkimage.defaultThreshold)
-sys.exit(ret)
+sys.exit(regression.check_result_image(fnm, sys.argv[1]))
diff --git a/testing/vcs/test_vcs_dump_json.json b/testing/vcs/test_vcs_dump_json.json
index b79b1319c3ec2fde786acbb559bb9a5f41e1e6e1..d408449871bef2eb96b994ed7f64ff41f6696f10 100644
--- a/testing/vcs/test_vcs_dump_json.json
+++ b/testing/vcs/test_vcs_dump_json.json
@@ -1,6 +1,6 @@
 {
  "G1d": {
-  "Charles.Doutriaux": {
+  "vcs_instance": {
    "colormap": null, 
    "datawc_calendar": 135441, 
    "datawc_timeunits": "days since 2000", 
@@ -29,7 +29,7 @@
   }
  }, 
  "Gfb": {
-  "Charles.Doutriaux": {
+  "vcs_instance": {
    "boxfill_type": "linear", 
    "color_1": 16, 
    "color_2": 239, 
@@ -70,7 +70,7 @@
   }
  }, 
  "Gfi": {
-  "Charles.Doutriaux": {
+  "vcs_instance": {
    "colormap": null, 
    "datawc_calendar": 135441, 
    "datawc_timeunits": "days since 2000", 
@@ -110,7 +110,7 @@
   }
  }, 
  "Gfm": {
-  "Charles.Doutriaux": {
+  "vcs_instance": {
    "colormap": null, 
    "datawc_calendar": 135441, 
    "datawc_timeunits": "days since 2000", 
@@ -153,7 +153,7 @@
   }
  }, 
  "Gi": {
-  "Charles.Doutriaux": {
+  "vcs_instance": {
    "angle": [
     35.0
    ], 
@@ -210,8 +210,50 @@
    "yticlabels2": "*"
   }
  }, 
+ "Gv": {
+  "vcs_instance": {
+   "alignment": "center", 
+   "colormap": null, 
+   "datawc_calendar": 135441, 
+   "datawc_timeunits": "days since 2000", 
+   "datawc_x1": 1e+20, 
+   "datawc_x2": 1e+20, 
+   "datawc_y1": 1e+20, 
+   "datawc_y2": 1e+20, 
+   "line": null, 
+   "linecolor": null, 
+   "linewidth": null, 
+   "projection": "linear", 
+   "reference": 1e+20, 
+   "scale": 1.0, 
+   "scaleoptions": [
+    "off", 
+    "constant", 
+    "normalize", 
+    "linear", 
+    "constantNNormalize", 
+    "constantNLinear"
+   ], 
+   "scalerange": [
+    0.1, 
+    1.0
+   ], 
+   "scaletype": "constantNNormalize", 
+   "type": "arrows", 
+   "xaxisconvert": "linear", 
+   "xmtics1": "", 
+   "xmtics2": "", 
+   "xticlabels1": "*", 
+   "xticlabels2": "*", 
+   "yaxisconvert": "linear", 
+   "ymtics1": "", 
+   "ymtics2": "", 
+   "yticlabels1": "*", 
+   "yticlabels2": "*"
+  }
+ }, 
  "P": {
-  "Charles.Doutriaux": {
+  "vcs_instance": {
    "box1": {
     "line": "default", 
     "priority": 1, 
@@ -579,7 +621,7 @@
   }
  }, 
  "Proj": {
-  "Charles.Doutriaux": {
+  "vcs_instance": {
    "parameters": [
     1e+20, 
     1e+20, 
@@ -601,7 +643,7 @@
   }
  }, 
  "Tf": {
-  "Charles.Doutriaux": {
+  "vcs_instance": {
    "color": [
     1
    ], 
@@ -632,7 +674,7 @@
   }
  }, 
  "Tl": {
-  "Charles.Doutriaux": {
+  "vcs_instance": {
    "color": [
     1
    ], 
@@ -662,7 +704,7 @@
   }
  }, 
  "Tm": {
-  "Charles.Doutriaux": {
+  "vcs_instance": {
    "color": [
     1
    ], 
@@ -691,7 +733,7 @@
   }
  }, 
  "To": {
-  "Charles.Doutriaux": {
+  "vcs_instance": {
    "angle": 0, 
    "halign": 0, 
    "height": 14, 
@@ -700,7 +742,7 @@
   }
  }, 
  "Tt": {
-  "Charles.Doutriaux": {
+  "vcs_instance": {
    "backgroundcolor": 0, 
    "backgroundopacity": 0, 
    "color": 1, 
diff --git a/testing/vcs/test_vcs_dump_json.py b/testing/vcs/test_vcs_dump_json.py
index aca6215b89fb5196da3ae09054dec9d2e3a07f63..421606c4d836ff0a9d048529fc152acbde577a21 100644
--- a/testing/vcs/test_vcs_dump_json.py
+++ b/testing/vcs/test_vcs_dump_json.py
@@ -1,33 +1,35 @@
 
 import filecmp
 import vcs,numpy,os,sys
-src=sys.argv[1]
+src = sys.argv[1]
 if os.path.exists("test_vcs_dump_json.json"):
     os.remove("test_vcs_dump_json.json")
 
-b = vcs.createboxfill("Charles.Doutriaux")
+b = vcs.createboxfill("vcs_instance")
 b.script("test_vcs_dump_json","a")
-b = vcs.createisofill("Charles.Doutriaux")
+b = vcs.createisofill("vcs_instance")
 b.script("test_vcs_dump_json","a")
-b = vcs.createisoline("Charles.Doutriaux")
+b = vcs.createisoline("vcs_instance")
 b.script("test_vcs_dump_json","a")
-b = vcs.createmeshfill("Charles.Doutriaux")
+b = vcs.createmeshfill("vcs_instance")
 b.script("test_vcs_dump_json","a")
-b = vcs.create1d("Charles.Doutriaux")
+b = vcs.create1d("vcs_instance")
 b.script("test_vcs_dump_json","a")
-b = vcs.createfillarea("Charles.Doutriaux")
+b = vcs.createfillarea("vcs_instance")
 b.script("test_vcs_dump_json","a")
-b = vcs.createtext("Charles.Doutriaux")
+b = vcs.createvector("vcs_instance")
 b.script("test_vcs_dump_json","a")
-b = vcs.createline("Charles.Doutriaux")
+b = vcs.createtext("vcs_instance")
 b.script("test_vcs_dump_json","a")
-b = vcs.createmarker("Charles.Doutriaux")
+b = vcs.createline("vcs_instance")
 b.script("test_vcs_dump_json","a")
-b = vcs.createtemplate("Charles.Doutriaux")
+b = vcs.createmarker("vcs_instance")
 b.script("test_vcs_dump_json","a")
-b = vcs.createprojection("Charles.Doutriaux")
+b = vcs.createtemplate("vcs_instance")
+b.script("test_vcs_dump_json","a")
+b = vcs.createprojection("vcs_instance")
 b.script("test_vcs_dump_json","a")
 
-assert(filecmp.cmp("test_vcs_dump_json.json",src))
+assert(filecmp.cmp("test_vcs_dump_json.json", src))
 
 
diff --git a/testing/vcs/test_vcs_export_text.py b/testing/vcs/test_vcs_export_text.py
new file mode 100644
index 0000000000000000000000000000000000000000..d4507e3d80f489a377a59aa18b040821d6ca009f
--- /dev/null
+++ b/testing/vcs/test_vcs_export_text.py
@@ -0,0 +1,21 @@
+import vcs, tempfile
+
+x = vcs.init(bg=1, geometry=(800, 600))
+txt = x.createtext()
+txt.x = [0.2, 0.2, 0.5, 0.8, 0.8]
+txt.y = [0.2, 0.8, 0.5, 0.8, 0.2]
+txt.string = ["SAMPLE TEXT A","SAMPLE TEXT B","SAMPLE TEXT C","SAMPLE TEXT D","SAMPLE TEXT E"]
+txt.halign = "center"
+txt.valign = "base"
+txt.height = 10
+x.plot(txt)
+
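+# Exercise each vector export format twice: once with text kept as text
+# objects, once with text rendered as paths.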
+for suffix, export in [('.ps', x.postscript), ('.pdf', x.pdf),
+                       ('.svg', x.svg), ('.eps', x.eps)]:
+    for textAsPaths in (False, True):
+        tmpfile = tempfile.NamedTemporaryFile(suffix=suffix,
+                      prefix='textAsPaths%s' % textAsPaths, delete=False)
+        export(tmpfile.name, textAsPaths=textAsPaths)
+        tmpfile.close()
diff --git a/testing/vcs/test_vcs_extends.py b/testing/vcs/test_vcs_extends.py
new file mode 100644
index 0000000000000000000000000000000000000000..4106cecd64c7215d628a241179475f280e56e42f
--- /dev/null
+++ b/testing/vcs/test_vcs_extends.py
@@ -0,0 +1,29 @@
+import vcs
+import numpy
+
+box = vcs.createboxfill()
+
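+# With the default automatic levels ([1e20] * 2), toggling the extension
+# arrows should leave the levels themselves untouched.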
+box.ext_1 = True
+assert numpy.allclose(box.levels, [1e20] * 2)
+
+box.ext_2 = True
+assert numpy.allclose(box.levels, [1e20] * 2)
+
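+# Assigning explicit levels should reset both extension flags.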
+box.levels = [1, 10, 20, 30, 40, 50, 60, 70, 80, 90, 100]
+assert box.ext_1 == False
+assert box.ext_2 == False
+
+box.ext_1 = True
+assert box.levels[0] < -9e19
+
+box.ext_2 = True
+assert box.levels[-1] > 9e19
+
+box.ext_1 = False
+assert box.levels[0] > -9e19
+
+box.ext_2 = False
+assert box.levels[-1] < 9e19
diff --git a/testing/vcs/test_vcs_fillarea_basics_no_plot.py b/testing/vcs/test_vcs_fillarea_basics_no_plot.py
index 870aae9be8c592ec896dac0eaf854aa4f8282b62..9a8e38ce7f388be647fad7b227695fc5c56d2af5 100644
--- a/testing/vcs/test_vcs_fillarea_basics_no_plot.py
+++ b/testing/vcs/test_vcs_fillarea_basics_no_plot.py
@@ -1,9 +1,5 @@
-
-import vcs
-import numpy
-import cdtime
-
-from vcs_test_common import *
+import numpy, cdtime, vcs
+from testing.common import test_values_setting
 
 x=vcs.init()
 x.drawlogooff()
diff --git a/testing/vcs/test_vcs_fillarea_transparency.py b/testing/vcs/test_vcs_fillarea_transparency.py
index dc3a8bf4bd052b00fe31ab47223c03fe3d943e27..831b3e029f53b28111ff5a92005e79e9590bd54b 100644
--- a/testing/vcs/test_vcs_fillarea_transparency.py
+++ b/testing/vcs/test_vcs_fillarea_transparency.py
@@ -1,13 +1,6 @@
-import vcs
-import sys,os
-pth = os.path.join(os.path.dirname(__file__),"..")
-sys.path.append(pth)
-import checkimage
+import vcs, sys, os, testing.regression as regression
 
-x=vcs.init()
-x.setantialiasing(0)
-x.drawlogooff()
-x.setbgoutputdimensions(1200,1090,units="pixels")
+x = regression.init()
 
 fa1 = x.createfillarea()
 
@@ -29,8 +22,4 @@ x.plot(fa1,bg=True)
 x.plot(fa2,bg=True)
 
 fnm = os.path.split(__file__[:-2]+"png")[-1]
-x.png(fnm)
-src = sys.argv[1]
-
-ret = checkimage.check_result_image(fnm,src,checkimage.defaultThreshold)
-sys.exit(ret)
+regression.run(x, fnm)
\ No newline at end of file
diff --git a/testing/vcs/test_vcs_first_png_blank.py b/testing/vcs/test_vcs_first_png_blank.py
index d11c59e6d5e1fec5a9c12c03882fd1117ff7c1d5..1e0bd8e2802838828c38281b2904c6ce5bd58291 100644
--- a/testing/vcs/test_vcs_first_png_blank.py
+++ b/testing/vcs/test_vcs_first_png_blank.py
@@ -1,20 +1,7 @@
+import vcs, numpy, cdms2, MV2, os, sys, testing.regression as regression
 
-import vcs,numpy,cdms2,MV2,os,sys
-src=sys.argv[1]
-pth = os.path.join(os.path.dirname(__file__),"..")
-sys.path.append(pth)
-import checkimage
 f=cdms2.open(os.path.join(vcs.sample_data,"clt.nc"))
 T=f('clt')
-v = vcs.init()
-v.setantialiasing(0)
-v.setbgoutputdimensions(1200,1091,units="pixels")
+v = regression.init()
 v.plot(T,bg=1)
-# This will write a blank plot to a file:
-fnm = "first_png_blank.png"
-v.png(fnm)
-
-print "fnm:",fnm
-print "src:",src
-ret = checkimage.check_result_image(fnm,src,checkimage.defaultThreshold)
-sys.exit(ret)
+regression.run(v, 'first_png_blank.png')
\ No newline at end of file
diff --git a/testing/vcs/test_vcs_flipNone.py b/testing/vcs/test_vcs_flipNone.py
index a76e271f1fe620f1a5c0aefcee5e34ee232151bf..79b69ffddbe985a8bc6240cb33f599c5bafb986f 100644
--- a/testing/vcs/test_vcs_flipNone.py
+++ b/testing/vcs/test_vcs_flipNone.py
@@ -1,26 +1,8 @@
+import vcs, numpy, cdms2, MV2, os, sys, testing.regression as regression
 
-import vcs,numpy,cdms2,MV2,os,sys
-src=sys.argv[1]
-pth = os.path.join(os.path.dirname(__file__),"..")
-sys.path.append(pth)
-import checkimage
-x=vcs.init()
-x.setantialiasing(0)
-x.drawlogooff()
-
-x.setbgoutputdimensions(1200,1091,units="pixels")
-
-f=cdms2.open(os.path.join(vcs.sample_data,"ta_ncep_87-6-88-4.nc"))
-
-
+x = regression.init()
+f = cdms2.open(os.path.join(vcs.sample_data,"ta_ncep_87-6-88-4.nc"))
 vr = "ta"
 s=f(vr,slice(0,1),longitude=slice(90,91),squeeze=1,level=(0,10000))
 x.plot(s,bg=1)
-fnm = "test_vcs_flipNone.png"
-x.png(fnm)
-print "fnm:",fnm
-print "src:",src
-ret = checkimage.check_result_image(fnm,src,checkimage.defaultThreshold)
-sys.exit(ret)
-
-
+regression.run(x, 'test_vcs_flipNone.png')
\ No newline at end of file
diff --git a/testing/vcs/test_vcs_flipX.py b/testing/vcs/test_vcs_flipX.py
index 0de7001aacba52022175308cb0b678550ada7137..e211bf16a771dfbb98fff5f20d19939d4dc4bb3f 100644
--- a/testing/vcs/test_vcs_flipX.py
+++ b/testing/vcs/test_vcs_flipX.py
@@ -1,26 +1,8 @@
+import vcs, numpy, cdms2, MV2, os, sys, testing.regression as regression
 
-import vcs,numpy,cdms2,MV2,os,sys
-src=sys.argv[1]
-pth = os.path.join(os.path.dirname(__file__),"..")
-sys.path.append(pth)
-import checkimage
-x=vcs.init()
-x.setantialiasing(0)
-x.drawlogooff()
-
-x.setbgoutputdimensions(1200,1091,units="pixels")
-
-f=cdms2.open(os.path.join(vcs.sample_data,"ta_ncep_87-6-88-4.nc"))
-
-
+x = regression.init()
+f = cdms2.open(os.path.join(vcs.sample_data,"ta_ncep_87-6-88-4.nc"))
 vr = "ta"
 s=f(vr,slice(0,1),longitude=slice(90,91),squeeze=1,latitude=(90,-90),level=(0,10000))
 x.plot(s,bg=1)
-fnm = "test_vcs_flipX.png"
-x.png(fnm)
-print "fnm:",fnm
-print "src:",src
-ret = checkimage.check_result_image(fnm,src,checkimage.defaultThreshold)
-sys.exit(ret)
-
-
+regression.run(x, 'test_vcs_flipX.png')
\ No newline at end of file
diff --git a/testing/vcs/test_vcs_flipXY.py b/testing/vcs/test_vcs_flipXY.py
index 8dd0f8d89332c0385a283847c97579e6fdf9bd2f..779a0fe7e523343a00b8210d0b126f15937c94e8 100644
--- a/testing/vcs/test_vcs_flipXY.py
+++ b/testing/vcs/test_vcs_flipXY.py
@@ -1,26 +1,9 @@
+import vcs, numpy, cdms2, MV2, os, sys, testing.regression as regression
 
-import vcs,numpy,cdms2,MV2,os,sys
-src=sys.argv[1]
-pth = os.path.join(os.path.dirname(__file__),"..")
-sys.path.append(pth)
-import checkimage
-x=vcs.init()
-x.setantialiasing(0)
-x.drawlogooff()
-
-x.setbgoutputdimensions(1200,1091,units="pixels")
-
-f=cdms2.open(os.path.join(vcs.sample_data,"ta_ncep_87-6-88-4.nc"))
-
+x = regression.init()
+f = cdms2.open(os.path.join(vcs.sample_data,"ta_ncep_87-6-88-4.nc"))
 
 vr = "ta"
 s=f(vr,slice(0,1),longitude=slice(90,91),squeeze=1,latitude=(90,-90))
 x.plot(s,bg=1)
-fnm = "test_vcs_flipXY.png"
-x.png(fnm)
-print "fnm:",fnm
-print "src:",src
-ret = checkimage.check_result_image(fnm,src,checkimage.defaultThreshold)
-sys.exit(ret)
-
-
+regression.run(x, 'test_vcs_flipXY.png')
\ No newline at end of file
diff --git a/testing/vcs/test_vcs_flipY.py b/testing/vcs/test_vcs_flipY.py
index 7194f3f3067aae4f18c1ea6f0d63e116ec51fa07..5efa5774512a55cc3e91a4f7480a598c3e41e354 100644
--- a/testing/vcs/test_vcs_flipY.py
+++ b/testing/vcs/test_vcs_flipY.py
@@ -1,26 +1,8 @@
+import vcs, numpy, cdms2, MV2, os, sys, testing.regression as regression
 
-import vcs,numpy,cdms2,MV2,os,sys
-src=sys.argv[1]
-pth = os.path.join(os.path.dirname(__file__),"..")
-sys.path.append(pth)
-import checkimage
-x=vcs.init()
-x.setantialiasing(0)
-x.drawlogooff()
-
-x.setbgoutputdimensions(1200,1091,units="pixels")
-
+x = regression.init()
 f=cdms2.open(os.path.join(vcs.sample_data,"ta_ncep_87-6-88-4.nc"))
-
-
 vr = "ta"
 s=f(vr,slice(0,1),longitude=slice(90,91),squeeze=1)
 x.plot(s,bg=1)
-fnm = "test_vcs_flipY.png"
-x.png(fnm)
-print "fnm:",fnm
-print "src:",src
-ret = checkimage.check_result_image(fnm,src,checkimage.defaultThreshold)
-sys.exit(ret)
-
-
+regression.run(x, 'test_vcs_flipY.png')
\ No newline at end of file
diff --git a/testing/vcs/test_vcs_gen_meshfill.py b/testing/vcs/test_vcs_gen_meshfill.py
index b726a20a692a6c89bae24f4f1dacd041e8769157..e5994055465ca30e5db4181ad23eb3aa5dd95c9d 100644
--- a/testing/vcs/test_vcs_gen_meshfill.py
+++ b/testing/vcs/test_vcs_gen_meshfill.py
@@ -1,39 +1,16 @@
+import os, sys, numpy, vcs, testing.regression as regression
 
-import vcs,numpy,os,sys
-src=sys.argv[1]
-pth = os.path.join(os.path.dirname(__file__),"..")
-sys.path.append(pth)
-import checkimage
-
-x=vcs.init()
-x.setantialiasing(0)
-x.drawlogooff()
-x.setbgoutputdimensions(1200,1091,units="pixels")
-
+x = regression.init()
 
 data = numpy.array([1,2,3,4])
-
 blon = numpy.array([-1,1,1,0,-1])
 blat = numpy.array([0,0,1,2,1])
-
 acell=numpy.array([blat,blon])
 bcell = numpy.array([blat,blon+2.5])
 ccell = numpy.array([blat+2.5,blon+2.5])
 dcell = numpy.array([blat+2.5,blon])
-
 mesh = numpy.array([acell,bcell,ccell,dcell])
-
 m=x.createmeshfill()
 
 x.plot(data,mesh,m,bg=1)
-
-
-fnm = "test_vcs_gen_meshfill.png"
-x.png(fnm)
-
-print "fnm:",fnm
-print "src:",src
-ret = checkimage.check_result_image(fnm,src,checkimage.defaultThreshold)
-sys.exit(ret)
-
-
+regression.run(x, "test_vcs_gen_meshfill.png")
\ No newline at end of file
diff --git a/testing/vcs/test_vcs_geometry.py b/testing/vcs/test_vcs_geometry.py
new file mode 100644
index 0000000000000000000000000000000000000000..578bfb67d1c82a4c3232d2620c762191cc92e883
--- /dev/null
+++ b/testing/vcs/test_vcs_geometry.py
@@ -0,0 +1,32 @@
+import sys, vcs
+
+# This will check if we can set the geometry
+# at the initialization of canvas
+canvas = vcs.init(geometry=(600, 400))
+canvas.open()
+
+if dict(width=600, height=400) != canvas.geometry():
+    canvas.close()
+    sys.exit(1)
+
+canvas.close()
+
+canvas2 = vcs.init()
+
+# This will check if we can safely set the geometry even
+# though the canvas window has not been created yet
+canvas2.geometry(400, 400)
+canvas2.open()
+if dict(width=400, height=400) != canvas2.geometry():
+    canvas2.close()
+    sys.exit(1)
+
+# This will check if we can dynamically change the geometry
+canvas2.geometry(500, 400)
+canvas2.geometry(500, 500)
+if dict(width=500, height=500) != canvas2.geometry():
+    canvas2.close()
+    sys.exit(1)
+
+canvas2.close()
+sys.exit(0)
\ No newline at end of file
diff --git a/testing/vcs/test_vcs_gms_animate_projected_plots.py b/testing/vcs/test_vcs_gms_animate_projected_plots.py
index 2e521cd4ad292b5ae1a4c1606b73df550103e752..3de234e418cb9ab65b0b5c1f16b30ac573e39d86 100644
--- a/testing/vcs/test_vcs_gms_animate_projected_plots.py
+++ b/testing/vcs/test_vcs_gms_animate_projected_plots.py
@@ -1,15 +1,5 @@
 # Test animation of projected plots
-
-import argparse
-import cdms2
-import MV2
-import os
-import sys
-import vcs
-
-pth = os.path.join(os.path.dirname(__file__), "..")
-sys.path.append(pth)
-import checkimage  # noqa
+import argparse, os, sys, cdms2, MV2, vcs, testing.regression as regression
 
 p = argparse.ArgumentParser(description="Testing animation of projected plots")
 p.add_argument("--gm_type", dest="gm", help="gm to test")
@@ -19,18 +9,13 @@ p.add_argument("--source", dest="src", help="path to baseline image")
 p.add_argument("--keep", dest="keep", action="store_true", default=False,
                help="Save images, even if baseline matches.")
 p.add_argument("--threshold", dest="threshold", type=int,
-               default=checkimage.defaultThreshold,
+               default=regression.defaultThreshold,
                help="Threshold value for image differnces")
 
 args = p.parse_args(sys.argv[1:])
 
 gm_type = args.gm
-
-x = vcs.init()
-x.setantialiasing(0)
-x.drawlogooff()
-x.setbgoutputdimensions(1200, 1091, units="pixels")
-
+x = regression.init()
 s = None
 
 if gm_type == "meshfill":
@@ -72,7 +57,7 @@ pngs = x.animate.close(preserve_pngs=True)  # so we can look at them again
 ret = 0
 pdir = os.path.split(pngs[0])[0]
 p = pdir + os.sep + "anim_0.png"
-ret = checkimage.check_result_image(p, args.src, args.threshold)
+ret = regression.check_result_image(p, args.src, args.threshold)
 if ret == 0 and not args.keep:
     for f in pngs:
         if os.path.isfile(f):
diff --git a/testing/vcs/test_vcs_gms_patterns_hatches.py b/testing/vcs/test_vcs_gms_patterns_hatches.py
index 4030429a0d02cfe2b2b4afb070722fca29cde8fd..a7681a4205b962e7a28da73a7250f5a351bfcf6d 100644
--- a/testing/vcs/test_vcs_gms_patterns_hatches.py
+++ b/testing/vcs/test_vcs_gms_patterns_hatches.py
@@ -1,14 +1,4 @@
-# Test the use of patterns/hatches for plots
-
-import argparse
-import cdms2
-import os
-import sys
-import vcs
-
-pth = os.path.join(os.path.dirname(__file__), "..")
-sys.path.append(pth)
-import checkimage  # noqa
+import argparse, os, sys, cdms2, vcs, testing.regression as regression
 
 p = argparse.ArgumentParser(description="Patterns/hatches testing code for vcs gms")
 p.add_argument("--source", dest="src", help="source image file")
@@ -22,7 +12,7 @@ p.add_argument("--lat2", dest="lat2", default=90, type=float, help="Last latitud
 p.add_argument("--lon1", dest="lon1", default=-180, type=float, help="First Longitude")
 p.add_argument("--lon2", dest="lon2", default=180, type=float, help="Last Longitude")
 p.add_argument("--keep", dest="keep", action="store_true", help="Save image, even if baseline matches.")
-p.add_argument("--threshold", dest="threshold", type=int, default=checkimage.defaultThreshold,
+p.add_argument("--threshold", dest="threshold", type=int, default=regression.defaultThreshold,
         help="Default threshold")
 p.add_argument("--non-contiguous", dest="contig", default=True, action="store_false", help="use non contiguous levels")
 
@@ -98,7 +88,7 @@ fnm += nm_xtra
 x.png(fnm)
 print "fnm:", fnm
 print "src:", src
-ret = checkimage.check_result_image(fnm+'.png', src,
+ret = regression.check_result_image(fnm+'.png', src,
                                     args.threshold,
                                     cleanup=not args.keep)
 if args.show:
diff --git a/testing/vcs/test_vcs_hatches_patterns.py b/testing/vcs/test_vcs_hatches_patterns.py
index 8cd1766f0db987dfb385f30bf9d277dfaee5f90a..8a467259c835be3ca76c319952eea6318bae31e8 100644
--- a/testing/vcs/test_vcs_hatches_patterns.py
+++ b/testing/vcs/test_vcs_hatches_patterns.py
@@ -1,17 +1,6 @@
-import os
-import sys
-import vcs
+import os, sys, vcs, testing.regression as regression
 
-pth = os.path.join(os.path.dirname(__file__), "..")
-sys.path.append(pth)
-import checkimage
-
-baselineImage = sys.argv[1]
-
-# Initialize the graphics canvas
-x = vcs.init()
-x.setantialiasing(0)
-x.setbgoutputdimensions(1200, 1091, units="pixels")
+x = regression.init()
 
 # Create a test plot for listing all the hatches and patterns
 style_list = []
@@ -77,11 +66,4 @@ plot_title.y = [.9]
 x.plot(plot_title, bg=1)
 x.plot(fill_test, bg=1)
 x.plot(fill_info, bg=1)
-
-testImage = os.path.abspath("test_vcs_hatches_patterns.png")
-x.png(testImage)
-
-ret = checkimage.check_result_image(testImage, baselineImage,
-                                    checkimage.defaultThreshold)
-
-sys.exit(ret)
+regression.run(x, "test_vcs_hatches_patterns.png")
\ No newline at end of file
diff --git a/testing/vcs/test_vcs_import.py b/testing/vcs/test_vcs_import.py
index c1224212518fcbde53cc84adca3a94c58c0561ec..01c3d0447cccc402b1ec4d3c3e759249a6e58e04 100644
--- a/testing/vcs/test_vcs_import.py
+++ b/testing/vcs/test_vcs_import.py
@@ -1,3 +1,2 @@
-
 import vcs
 
diff --git a/testing/vcs/test_vcs_infinity.py b/testing/vcs/test_vcs_infinity.py
index 235f551ebd9d12058082808a30525047260fd88d..daf40601683bed2b57271251687da2ae742f1b9a 100644
--- a/testing/vcs/test_vcs_infinity.py
+++ b/testing/vcs/test_vcs_infinity.py
@@ -1,28 +1,16 @@
-
-import vcs,numpy,cdms2,MV2,os,sys
-src=sys.argv[1]
-pth = os.path.join(os.path.dirname(__file__),"..")
-sys.path.append(pth)
-import checkimage
-import numpy
+import os, sys, numpy, MV2, cdms2, testing.regression as regression
 
 s= numpy.sin(numpy.arange(100))
-s=numpy.reshape(s,(10,10))
+s = numpy.reshape(s,(10,10))
 
 s[4,6] = numpy.inf
 s[7,9] = numpy.NINF
 s[9,2] = numpy.nan
 
-x=vcs.init()
+x = regression.init()
 x.setantialiasing(0)
 x.drawlogooff()
 x.setbgoutputdimensions(1200,1091,units="pixels")
 x.plot(s,bg=1)
 fnm = "infinity.png"
-x.png(fnm)
-
-print "fnm:",fnm
-print "src:",src
-ret = checkimage.check_result_image(fnm,src,checkimage.defaultThreshold)
-sys.exit(ret)
-
+regression.run(x, fnm)
\ No newline at end of file
diff --git a/testing/vcs/test_vcs_iso_celine_part1.py b/testing/vcs/test_vcs_iso_celine_part1.py
index 586918d36ac55ae19827b0243fe215e9ba7374a3..a8fcc5ae72162cc06e29c599ed834bad0b6bb3d6 100644
--- a/testing/vcs/test_vcs_iso_celine_part1.py
+++ b/testing/vcs/test_vcs_iso_celine_part1.py
@@ -1,22 +1,13 @@
+import os,sys, MV2, numpy, vcs, cdms2, testing.regression as regression
 
-import vcs,numpy,cdms2,MV2,os,sys
 src=sys.argv[1]
 pth0 = os.path.dirname(__file__)
-pth = os.path.join(pth0,"..")
-sys.path.append(pth)
-import checkimage
-f=cdms2.open(os.path.join(pth0,"celine.nc"))
-s=f("data")
-x=vcs.init()
+f = cdms2.open(os.path.join(pth0,"celine.nc"))
+s = f("data")
+x = regression.init()
 x.setantialiasing(0)
-x.drawlogooff()
 x.scriptrun(os.path.join(pth0,"celine.json"))
-i=x.getisofill("celine")
+i = x.getisofill("celine")
 x.plot(s,i,bg=1)
 fnm = "test_celine_iso.png"
-x.png(fnm)
-
-print "fnm:",fnm
-print "src:",src
-ret = checkimage.check_result_image(fnm,src,checkimage.defaultThreshold)
-sys.exit(ret)
+regression.run(x, fnm)
\ No newline at end of file
diff --git a/testing/vcs/test_vcs_iso_celine_part2.py b/testing/vcs/test_vcs_iso_celine_part2.py
index f99f821da90cb03eaef8519b6834c207cbb20a1d..c1c1df5b1c8507b88a72a1a2076ddc88d4dafc34 100644
--- a/testing/vcs/test_vcs_iso_celine_part2.py
+++ b/testing/vcs/test_vcs_iso_celine_part2.py
@@ -1,26 +1,15 @@
+import os, sys, MV2, numpy, vcs, cdms2, testing.regression as regression
 
-import vcs,numpy,cdms2,MV2,os,sys
-src=sys.argv[1]
 pth0 = os.path.dirname(__file__)
-pth = os.path.join(pth0,"..")
-sys.path.append(pth)
-import checkimage
-f=cdms2.open(os.path.join(pth0,"celine.nc"))
-s=f("data")
-x=vcs.init()
-x.setantialiasing(0)
-x.drawlogooff()
+f = cdms2.open(os.path.join(pth0,"celine.nc"))
+s = f("data")
+x = regression.init()
 x.scriptrun(os.path.join(pth0,"celine.json"))
-i=x.getisofill("celine")
-b=vcs.createboxfill()
+i = x.getisofill("celine")
+b = vcs.createboxfill()
 b.levels=i.levels
 b.fillareacolors=i.fillareacolors
 b.boxfill_type="custom"
 x.plot(s,b,bg=1)
 fnm = "test_celine_iso_2.png"
-x.png(fnm)
-
-print "fnm:",fnm
-print "src:",src
-ret = checkimage.check_result_image(fnm,src,checkimage.defaultThreshold)
-sys.exit(ret)
+regression.run(x, fnm)
\ No newline at end of file
diff --git a/testing/vcs/test_vcs_isofill_data_read_north_to_south.py b/testing/vcs/test_vcs_isofill_data_read_north_to_south.py
index 5f12f65b0b0cfe87815ea99a076b46061970d5cf..8d1cc949c142c9356c3ec31f6129a15e807ba4cf 100644
--- a/testing/vcs/test_vcs_isofill_data_read_north_to_south.py
+++ b/testing/vcs/test_vcs_isofill_data_read_north_to_south.py
@@ -1,24 +1,10 @@
-import cdms2
-import vcs
-import sys
-import os
-pth = os.path.join(os.path.dirname(__file__),"..")
-sys.path.append(pth)
-import checkimage
+import os, sys, cdms2, vcs, testing.regression as regression
+
 f = cdms2.open(os.path.join(vcs.sample_data,"clt.nc"))
 clt = f("clt",latitude=(80.0, 38.0),squeeze=1,longitude=(-180.0, 180.0),time=slice(0,1))
-x = vcs.init()
-x.drawlogooff()
-x.setbgoutputdimensions(1200,1091,units="pixels")
-x.setantialiasing(0)
+x = regression.init()
 gm = vcs.createisofill()
 gm.projection="polar"
 x.plot( clt,gm,bg=1)
 fnm = os.path.split(__file__)[-1][:-2]+"png"
-src= sys.argv[1]
-x.png(fnm)
-
-print "fnm:",fnm
-print "src:",src
-ret = checkimage.check_result_image(fnm,src,checkimage.defaultThreshold)
-sys.exit(ret)
+regression.run(x, fnm)
\ No newline at end of file
diff --git a/testing/vcs/test_vcs_isofill_isoline_labels.py b/testing/vcs/test_vcs_isofill_isoline_labels.py
index d230fc8fc4732218a8829f64529bd83e3b7eb403..192ffc4bf904f4eb0da3b0bef6f711856524e4e4 100644
--- a/testing/vcs/test_vcs_isofill_isoline_labels.py
+++ b/testing/vcs/test_vcs_isofill_isoline_labels.py
@@ -1,24 +1,10 @@
-import vcs,cdms2,sys,os
-
-baselineImage = sys.argv[1]
-
-pth = os.path.join(os.path.dirname(__file__),"..")
-sys.path.append(pth)
-import checkimage
+import os, sys, cdms2, vcs, testing.regression as regression
 
 dataset = cdms2.open(os.path.join(vcs.sample_data,"clt.nc"))
 data = dataset("clt")
-
-canvas = vcs.init()
-canvas.setantialiasing(0)
-canvas.setbgoutputdimensions(1200, 1091, units="pixels")
-canvas.drawlogooff()
-
+canvas = regression.init()
 isofill = canvas.createisofill()
-
-# First plot the isofill
 canvas.plot(data, isofill, bg=1)
-
 isoline = canvas.createisoline()
 isoline.label="y"
 texts=[]
@@ -35,13 +21,6 @@ for i in range(10):
 isoline.text = texts
 isoline.linecolors = colors
 
-# Next plot the isolines with labels
+# Plot the isolines with labels
 canvas.plot(data, isoline, bg=1)
-
-testImage = os.path.abspath("test_isofill_isoline_labels.png")
-canvas.png(testImage)
-
-ret = checkimage.check_result_image(testImage, baselineImage,
-                                    checkimage.defaultThreshold)
-
-sys.exit(ret)
+regression.run(canvas, "test_isofill_isoline_labels.png")
\ No newline at end of file
diff --git a/testing/vcs/test_vcs_isofill_levels.py b/testing/vcs/test_vcs_isofill_levels.py
new file mode 100644
index 0000000000000000000000000000000000000000..912178148bed79f5aafc5e3c6c0ac50d2730cdd4
--- /dev/null
+++ b/testing/vcs/test_vcs_isofill_levels.py
@@ -0,0 +1,20 @@
+import cdms2
+import os
+import sys
+import testing.regression as regression
+import vcs
+
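+# sys.argv: [2] = input data file, [3] = key selecting one of the level sets below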
+data = sys.argv[2]
+level = sys.argv[3]
+levels = {'0': range(-5,36,5),
+          '1': [-1000, -15, 35],
+          '2': [-300, -15, 0, 15, 25]}
+
+x = regression.init(bg=1)
+f = cdms2.open(data)
+s = f("sst")
+iso = x.createisofill()
+iso.levels = levels[level]
+x.plot(s, iso)
+regression.run(x, "test_vcs_isofill_level%s.png" % level)
diff --git a/testing/vcs/test_vcs_isofill_mask_cell_shift.py b/testing/vcs/test_vcs_isofill_mask_cell_shift.py
index 9bc8671131e5bd215abdc918cc114667e8af018b..199b1586d466b7b4dcc2df3f6b516ce3be3b2aea 100644
--- a/testing/vcs/test_vcs_isofill_mask_cell_shift.py
+++ b/testing/vcs/test_vcs_isofill_mask_cell_shift.py
@@ -1,25 +1,8 @@
-import os,sys
-import MV2
-import vcs
-import cdms2
-src=sys.argv[1]
-pth = os.path.join(os.path.dirname(__file__),"..")
-sys.path.append(pth)
-import checkimage
-x=vcs.init()
-x.setantialiasing(0)
-x.drawlogooff()
-
-x.setbgoutputdimensions(1200,1091,units="pixels")
-
-f=cdms2.open(os.path.join(vcs.sample_data,"clt.nc"))
-s=f("clt",slice(0,1),latitude=(30,70),longitude=(-130,-60))
-s2=MV2.masked_greater(s,65.)
+import os, sys, MV2, cdms2, vcs, testing.regression as regression
 
+x = regression.init()
+f = cdms2.open(os.path.join(vcs.sample_data,"clt.nc"))
+s = f("clt",slice(0,1),latitude=(30, 70),longitude=(-130, -60))
+s2 = MV2.masked_greater(s, 65.)
 x.plot(s2,"default","isofill",bg=1)
-fnm = "test_vcs_isofill_mask_cell_shift.png"
-x.png(fnm)
-print "fnm:",fnm
-print "src:",src
-ret = checkimage.check_result_image(fnm,src,checkimage.defaultThreshold)
-sys.exit(ret)
+regression.run(x, "test_vcs_isofill_mask_cell_shift.png")
\ No newline at end of file
diff --git a/testing/vcs/test_vcs_isoline_extend_attributes.py b/testing/vcs/test_vcs_isoline_extend_attributes.py
new file mode 100644
index 0000000000000000000000000000000000000000..77b212c3e45b36c9bba63c255ac340a4bec9fdca
--- /dev/null
+++ b/testing/vcs/test_vcs_isoline_extend_attributes.py
@@ -0,0 +1,16 @@
+import cdms2
+import vcs
+import testing.regression as regression
+
+x = regression.init()
+isoline = vcs.createisoline()
+f = cdms2.open(vcs.sample_data + '/clt.nc')
+s = f("clt")
+isoline.line = ["dash-dot"]
+isoline.linecolors = [250]
+isoline.linewidths = [5]
+x.plot(s, isoline)
+fnm = "test_vcs_isoline_extend_attributes.png"
+regression.run(x, fnm)
diff --git a/testing/vcs/test_vcs_isoline_labels.py b/testing/vcs/test_vcs_isoline_labels.py
index 77033f2c822cc29c9efda6ac54df7dcccffc5cfe..46023543162646912f649feb99dded3566708009 100644
--- a/testing/vcs/test_vcs_isoline_labels.py
+++ b/testing/vcs/test_vcs_isoline_labels.py
@@ -1,20 +1,8 @@
-import vcs,cdms2,sys,os
-
-# ('/path/to/filename', '.extension')
-baseline = os.path.splitext(sys.argv[1])
-
-pth = os.path.join(os.path.dirname(__file__),"..")
-sys.path.append(pth)
-import checkimage
+import os, sys, cdms2, vcs, testing.regression as regression
 
 dataset = cdms2.open(os.path.join(vcs.sample_data,"clt.nc"))
 data = dataset("clt")
-
-canvas = vcs.init()
-canvas.setantialiasing(0)
-canvas.setbgoutputdimensions(1200, 1091, units="pixels")
-canvas.drawlogooff()
-
+canvas = regression.init()
 isoline = canvas.createisoline()
 isoline.label="y"
 texts=[]
@@ -33,24 +21,17 @@ isoline.text = texts
 # First test using isoline.text[...].color
 canvas.plot(data, isoline, bg=1)
 
+baseline = os.path.splitext(sys.argv[1])
 baselineImage = "%s%s"%baseline
-testImage = os.path.abspath("test_isoline_labels.png")
-canvas.png(testImage)
-
-ret = checkimage.check_result_image(testImage, baselineImage,
-                                    checkimage.defaultThreshold)
+ret = regression.run_wo_terminate(canvas, "test_isoline_labels.png", baselineImage)
 
 # Now set isoline.linecolors and test again.
 canvas.clear()
 isoline.linecolors = colors
 canvas.plot(data, isoline, bg=1)
-
 baselineImage = "%s%d%s"%(baseline[0], 2, baseline[1])
 testImage = os.path.abspath("test_isoline_labels2.png")
-canvas.png(testImage)
-
-ret += checkimage.check_result_image(testImage, baselineImage,
-                                     checkimage.defaultThreshold)
+ret += regression.run_wo_terminate(canvas, testImage, baselineImage)
 
 # Now set isoline.textcolors and test again.
 canvas.clear()
@@ -59,9 +40,6 @@ canvas.plot(data, isoline, bg=1)
 
 baselineImage = "%s%d%s"%(baseline[0], 3, baseline[1])
 testImage = os.path.abspath("test_isoline_labels3.png")
-canvas.png(testImage)
-
-ret += checkimage.check_result_image(testImage, baselineImage,
-                                     checkimage.defaultThreshold)
+ret += regression.run_wo_terminate(canvas, testImage, baselineImage)
 
 sys.exit(ret)
diff --git a/testing/vcs/test_vcs_isoline_labels_background.py b/testing/vcs/test_vcs_isoline_labels_background.py
index da1d1e6ac99cd30f74645fe2b820c2801708fa91..08088836adfea942aa61caec0fecbd15c920e56e 100644
--- a/testing/vcs/test_vcs_isoline_labels_background.py
+++ b/testing/vcs/test_vcs_isoline_labels_background.py
@@ -1,26 +1,13 @@
-import cdms2
-import os
-import sys
-import vcs
+import os, sys, cdms2, vcs, testing.regression as regression
 import random
 
-# ('/path/to/filename', '.extension')
-baseline = os.path.splitext(sys.argv[1])
-
-pth = os.path.join(os.path.dirname(__file__), "..")
-sys.path.append(pth)
-import checkimage
-
 (latmin, latmax, lonmin, lonmax) = (-90, 90, -180, 180)
 dataset = cdms2.open(os.path.join(vcs.sample_data, "tas_cru_1979.nc"))
 data = dataset("tas", time=slice(0, 1), latitude=(latmin, latmax),
                longitude=(lonmin, lonmax, 'co'), squeeze=1)
 dataset.close()
 
-canvas = vcs.init()
-canvas.setantialiasing(0)
-canvas.setbgoutputdimensions(1200, 1091, units="pixels")
-canvas.drawlogooff()
+canvas = regression.init()
 canvas.backgroundcolor = [100, 105, 105]
 
 isoline = canvas.createisoline()
@@ -50,11 +37,6 @@ isoline.labelskipdistance = 15.0
 
 # First test using isoline.text[...].color
 canvas.plot(data, isoline, bg=1)
-
+baseline = os.path.splitext(sys.argv[1])
 baselineImage = "%s%s" % baseline
-testImage = os.path.abspath("test_isoline_labels_background.png")
-canvas.png(testImage)
-
-ret = checkimage.check_result_image(testImage, baselineImage,
-                                    checkimage.defaultThreshold)
-sys.exit(ret)
+regression.run(canvas, "test_isoline_labels_background.png", baselineImage)
\ No newline at end of file
diff --git a/testing/vcs/test_vcs_isoline_labels_multi_label_input_types.py b/testing/vcs/test_vcs_isoline_labels_multi_label_input_types.py
index da5e5d3c3769b739999fcb9cc2115e50f5a1cb74..8c6e13a7e95a5817dc1dfd9518d8cf9b7d7f6cb5 100644
--- a/testing/vcs/test_vcs_isoline_labels_multi_label_input_types.py
+++ b/testing/vcs/test_vcs_isoline_labels_multi_label_input_types.py
@@ -1,30 +1,18 @@
-import vcs
-import cdms2
-import os,sys
-pth = os.path.join(os.path.dirname(__file__),"..")
-sys.path.append(pth)
-import checkimage
-f=cdms2.open(os.path.join(vcs.sample_data,"clt.nc"))
-s=f("clt")
-x=vcs.init()
-x.setantialiasing(0)
-x.drawlogooff()
-x.setbgoutputdimensions(1200,1091,units="pixels")
-iso=x.createisoline()
-t=x.createtext()
-t.color=243
-t.height=25
-to=x.createtextorientation()
-to.height = 55
-tt=x.createtexttable()
-tt.color=245
-iso.textcolors=[None,None,None,242,244]
-iso.text=[t,tt,to]
-iso.label="y"
-x.plot(s,iso,bg=1)
-x.png("test_vcs_isoline_labels_multi_label_input_types.png")
-
-src=sys.argv[1]
+import os, sys, cdms2, vcs, testing.regression as regression
 
-ret = checkimage.check_result_image('test_vcs_isoline_labels_multi_label_input_types.png',src,checkimage.defaultThreshold)
-sys.exit(ret)
+f = cdms2.open(os.path.join(vcs.sample_data,"clt.nc"))
+s = f("clt")
+x = regression.init()
+iso = x.createisoline()
+t = x.createtext()
+t.color = 243
+t.height = 25
+to = x.createtextorientation()
+to.height = 55
+tt = x.createtexttable()
+tt.color = 245
+iso.textcolors = [None,None,None,242,244]
+iso.text = [t,tt,to]
+iso.label = "y"
+x.plot(s, iso, bg=1)
+regression.run(x, "test_vcs_isoline_labels_multi_label_input_types.png")
\ No newline at end of file
diff --git a/testing/vcs/test_vcs_isoline_labelskipdistance.py b/testing/vcs/test_vcs_isoline_labelskipdistance.py
index 04ecdfabb404b3a568483691b74bf706723eab50..4b119e0bc3504ef25e78eb20d3e4307b43f1ecac 100644
--- a/testing/vcs/test_vcs_isoline_labelskipdistance.py
+++ b/testing/vcs/test_vcs_isoline_labelskipdistance.py
@@ -1,21 +1,8 @@
-import cdms2
-import os
-import sys
-import vcs
-
-baselineImage = sys.argv[1]
-
-pth = os.path.join(os.path.dirname(__file__), "..")
-sys.path.append(pth)
-import checkimage
+import os, sys, cdms2, vcs, testing.regression as regression
 
 dataset = cdms2.open(os.path.join(vcs.sample_data, "clt.nc"))
 data = dataset("clt")
-
-canvas = vcs.init()
-canvas.setantialiasing(0)
-canvas.setbgoutputdimensions(1200, 1091, units="pixels")
-canvas.drawlogooff()
+canvas = regression.init()
 
 isoline = canvas.createisoline()
 isoline.label = "y"
@@ -36,11 +23,4 @@ isoline.linecolors = colors
 
 # Next plot the isolines with labels
 canvas.plot(data, isoline, bg=1)
-
-testImage = os.path.abspath("test_isoline_labelskipdistance.png")
-canvas.png(testImage)
-
-ret = checkimage.check_result_image(testImage, baselineImage,
-                                    checkimage.defaultThreshold)
-
-sys.exit(ret)
+regression.run(canvas, "test_isoline_labelskipdistance.png")
\ No newline at end of file
diff --git a/testing/vcs/test_vcs_isoline_numpy.py b/testing/vcs/test_vcs_isoline_numpy.py
index ced140e773fb43905c7240974aa644308caeb9dc..4534529ba31b5c65be6ade918b586ff3d5a6026a 100644
--- a/testing/vcs/test_vcs_isoline_numpy.py
+++ b/testing/vcs/test_vcs_isoline_numpy.py
@@ -1,21 +1,10 @@
-import vcs,cdms2,sys,os
-src=sys.argv[1]
-pth = os.path.join(os.path.dirname(__file__),"..")
-sys.path.append(pth)
-import checkimage
-x=vcs.init()
-x.setantialiasing(0)
-x.setbgoutputdimensions(1200,1091,units="pixels")
-x.drawlogooff()
-fnm = os.path.join(vcs.sample_data,'clt.nc')
-f=cdms2.open(fnm)
+import os, sys, cdms2, vcs, testing.regression as regression
 
-s=f("clt")
-gm=x.createisofill()
-x.plot(s.filled(),gm,bg=1)
+x = regression.init()
+fnm = os.path.join(vcs.sample_data,'clt.nc')
+f = cdms2.open(fnm)
+s = f("clt")
+gm = x.createisofill()
+x.plot(s.filled(),gm)
 fnm = "test_vcs_isoline_numpy.png"
-x.png(fnm)
-print "fnm:",fnm
-print "src:",src
-ret = checkimage.check_result_image(fnm,src,checkimage.defaultThreshold)
-sys.exit(ret)
+regression.run(x, fnm)
diff --git a/testing/vcs/test_vcs_isoline_width_stipple.py b/testing/vcs/test_vcs_isoline_width_stipple.py
index b563436c26017e9e57d94adde068cf567f773303..20a7e5c4ff48c532d70069ac369f4e7ed6cd8dcb 100644
--- a/testing/vcs/test_vcs_isoline_width_stipple.py
+++ b/testing/vcs/test_vcs_isoline_width_stipple.py
@@ -1,22 +1,8 @@
-import cdms2
-import os
-import sys
-import vcs
-
-baselineImage = sys.argv[1]
-
-pth = os.path.join(os.path.dirname(__file__), "..")
-sys.path.append(pth)
-import checkimage  # noqa
+import os, sys, cdms2, vcs, testing.regression as regression
 
 dataset = cdms2.open(os.path.join(vcs.sample_data, "clt.nc"))
 data = dataset("clt")
-
-canvas = vcs.init()
-canvas.setantialiasing(0)
-canvas.setbgoutputdimensions(1200, 1091, units="pixels")
-canvas.drawlogooff()
-
+canvas = regression.init()
 isoline = canvas.createisoline()
 isoline.label = "y"
 texts = []
@@ -35,15 +21,8 @@ for i in range(7):
 isoline.levels = levels
 isoline.text = texts
 isoline.linecolors = colors
-
 isoline.linewidths = (1, 2, 3, 4, 1)
 isoline.line = ('dot', 'dash', 'solid', 'dash-dot', 'long-dash', 'dot', 'dash')
-
 # Next plot the isolines with labels
 canvas.plot(data, isoline, bg=1)
-
-testImage = os.path.abspath("test_isoline_width_stipple.png")
-canvas.png(testImage)
-
-ret = checkimage.check_result_image(testImage, baselineImage, 30)
-sys.exit(ret)
+regression.run(canvas, "test_vcs_isoline_width_stipple.png")
diff --git a/testing/vcs/test_vcs_issue_960_labels.py b/testing/vcs/test_vcs_issue_960_labels.py
index ee45216cdcf00841cad219df39509b58e22d0ebe..7da2104ff4de4d623af43a1c3ae58ec091f41ac2 100644
--- a/testing/vcs/test_vcs_issue_960_labels.py
+++ b/testing/vcs/test_vcs_issue_960_labels.py
@@ -1,29 +1,19 @@
-import sys,os,cdms2,vcs
-import vcs
-src1=sys.argv[1]
-src2=sys.argv[2]
-pth = os.path.join(os.path.dirname(__file__),"..")
-sys.path.append(pth)
-import checkimage
-x=vcs.init()
-x.setantialiasing(0)
-x.drawlogooff()
-x.setbgoutputdimensions(1200,1091,units="pixels")
+import os, sys, cdms2, vcs, testing.regression as regression
+
+src1 = sys.argv[1]
+src2 = sys.argv[2]
+x = regression.init()
 f=cdms2.open(os.path.join(vcs.sample_data,"clt.nc"))
 s=f("clt",time=slice(0,1),latitude=(-7,5),squeeze=1)
 x.plot(s,bg=1)
 fnm = "test_vcs_issue_960_labels_1.png"
 x.png(fnm)
-print "fnm:",fnm
-print "src:",src1
-ret = checkimage.check_result_image(fnm,src1,checkimage.defaultThreshold)
+ret = regression.check_result_image(fnm, src1)
 b=x.createboxfill()
 b.datawc_y1=-7
 b.datawc_y2=5
 x.plot(s,b,bg=1)
 fnm = "test_vcs_issue_960_labels_2.png"
 x.png(fnm)
-print "fnm:",fnm
-print "src:",src2
-ret += checkimage.check_result_image(fnm,src2,checkimage.defaultThreshold)
-sys.exit(ret)
+ret += regression.check_result_image(fnm, src2)
+sys.exit(ret)
\ No newline at end of file
diff --git a/testing/vcs/test_vcs_lambert.py b/testing/vcs/test_vcs_lambert.py
index 17a50209f8147589bba589a7c2334b938a688c69..c78e71e9b564018071760247c09d38abed002771 100644
--- a/testing/vcs/test_vcs_lambert.py
+++ b/testing/vcs/test_vcs_lambert.py
@@ -1,27 +1,10 @@
-import vcs,cdms2
-import os,sys
+import os, sys, cdms2, vcs, testing.regression as regression
 f = cdms2.open(os.path.join(vcs.sample_data, "clt.nc"))
 s = f("clt")
-x = vcs.init()
-x.setantialiasing(0)
-x.drawlogooff()
-x.setbgoutputdimensions(1200,1090,units="pixels")
+x = regression.init()
 iso = x.createisofill()
 p=x.createprojection()
 p.type="lambert"
-
 iso.projection = p
 x.plot(s(latitude=(20, 60),longitude=(-140,-20)), iso, bg=True)
-
-# Load the image testing module:
-testingDir = os.path.join(os.path.dirname(__file__), "..")
-sys.path.append(testingDir)
-import checkimage
-
-# Create the test image and compare:
-baseline = sys.argv[1]
-testFile = "test_vcs_lambert.png"
-x.png(testFile)
-ret = checkimage.check_result_image(testFile, baseline,
-                                    checkimage.defaultThreshold)
-sys.exit(ret)
+regression.run(x, "test_vcs_lambert.png")
\ No newline at end of file
diff --git a/testing/vcs/test_vcs_large_pattern_hatch.py b/testing/vcs/test_vcs_large_pattern_hatch.py
index 3bcf0827da3b051415b4c00f358d15423e1d4791..782cfb14b9f3dedbbfc917d321ac654be2b22b32 100644
--- a/testing/vcs/test_vcs_large_pattern_hatch.py
+++ b/testing/vcs/test_vcs_large_pattern_hatch.py
@@ -1,31 +1,12 @@
-import vcs
-import sys
-import os
-
-baseline = sys.argv[1]
-
-pth = os.path.join(os.path.dirname(__file__), "..")
-sys.path.append(pth)
-import checkimage
-
-canvas = vcs.init()
-canvas.setantialiasing(0)
-canvas.setbgoutputdimensions(1200,1090, units="pixels")
-canvas.drawlogooff()
+import os, sys, vcs, testing.regression as regression
 
+canvas = regression.init()
 fillarea = vcs.createfillarea()
 fillarea.x = [[0, .33, .33, 0], [.33, .67, .67, .33], [.67, 1, 1, .67]]
 fillarea.y = [[0, 0, 1, 1]] * 3
 fillarea.style = ["solid", "pattern", "hatch"]
 fillarea.index = [1, 5, 5]
 fillarea.color = [50, 50, 50]
-
 canvas.plot(fillarea, bg=True)
-
-testImage = os.path.abspath("test_vcs_large_pattern_hatch.png")
-canvas.png(testImage)
-
-ret = checkimage.check_result_image(testImage, baseline,
-                                    checkimage.defaultThreshold)
-
-sys.exit(ret)
+fnm = os.path.abspath("test_vcs_large_pattern_hatch.png")
+regression.run(canvas, fnm)
\ No newline at end of file
diff --git a/testing/vcs/test_vcs_legend.py b/testing/vcs/test_vcs_legend.py
index 26beec7e5ebbf3558ff0ccbfcbecafdc0b64c03e..a352bc080ad2dc7761efc0550bdaaae5225e0a43 100644
--- a/testing/vcs/test_vcs_legend.py
+++ b/testing/vcs/test_vcs_legend.py
@@ -1,5 +1,4 @@
-import sys,os
-import argparse
+import os, sys, argparse, cdms2, MV2, vcs, testing.regression as regression
 
 p = argparse.ArgumentParser(description="Basic gm testing code for vcs")
 p.add_argument("--source", dest="src", help="source image file")
@@ -14,20 +13,10 @@ args = p.parse_args(sys.argv[1:])
 
 gm_type= args.gm
 src = args.src
-pth = os.path.join(os.path.dirname(__file__),"..")
-sys.path.append(pth)
-import checkimage
-
-import vcs
-import sys
-import cdms2
-import vtk
-import os
-import MV2
 
 bg = not args.show
 
-x=vcs.init()
+x = vcs.init()
 x.setantialiasing(0)
 x.drawlogooff()
 if bg:
@@ -81,7 +70,7 @@ fnm = "test_vcs_legend_%s_%s_ext1_%s_ext2_%s" % (gm_type.lower(),args.orientatio
 x.png(fnm)
 print "fnm:",fnm
 print "src:",src
-ret = checkimage.check_result_image(fnm+'.png',src,checkimage.defaultThreshold, cleanup=not args.keep)
+ret = regression.check_result_image(fnm + '.png', src, regression.defaultThreshold, cleanup=not args.keep)
 if args.show:
     raw_input("Press Enter")
 sys.exit(ret)
diff --git a/testing/vcs/test_vcs_line_patterns.py b/testing/vcs/test_vcs_line_patterns.py
new file mode 100755
index 0000000000000000000000000000000000000000..848ebb110fa0d6ea338a967303603f4200cbaa51
--- /dev/null
+++ b/testing/vcs/test_vcs_line_patterns.py
@@ -0,0 +1,14 @@
+import vcs
+import cdms2
+import testing.regression as regression
+
+x = regression.init(bg=1, geometry=(1620, 1080))
+
+f = cdms2.open(vcs.sample_data + "/clt.nc")
+s = f('clt')
+iso = x.createisoline()
+iso.level = [5, 50, 70, 95]
+iso.line = ['dot', 'dash', 'dash-dot', 'long-dash']
+x.plot(s, iso, continents=0)
+name = "test_vcs_line_patterns.png"
+regression.run(x, name)
diff --git a/testing/vcs/test_vcs_lon_axes_freak_out.py b/testing/vcs/test_vcs_lon_axes_freak_out.py
index ecb63f2a5dbf73031d8851b48c29616bd7935ff2..f18328f582a4a2c7e40c7f0fbcbd372867e69696 100644
--- a/testing/vcs/test_vcs_lon_axes_freak_out.py
+++ b/testing/vcs/test_vcs_lon_axes_freak_out.py
@@ -1,30 +1,11 @@
-import os,sys,vcs,cdms2
-src=sys.argv[1]
-pth = os.path.join(os.path.dirname(__file__),"..")
-sys.path.append(pth)
-import checkimage
+import os, sys, cdms2, vcs, testing.regression as regression
 
+f = cdms2.open(os.path.join(vcs.sample_data,"clt.nc"))
+s = f("clt")
+s3 = f("clt",longitude=(0,360))
 
-f=cdms2.open(os.path.join(vcs.sample_data,"clt.nc"))
-s=f("clt")
-s3=f("clt",longitude=(0,360))
-
-print s.shape,s3.shape
-
-
-x=vcs.init()
-x.setantialiasing(0)
-x.drawlogooff()
-x.setbgoutputdimensions(1200,1091,units="pixels")
-
+x = regression.init()
 x.plot(s,bg=1)
 x.clear()
 x.plot(s3,bg=1)
-
-fnm = "test_lon_axes_freak_out.png"
-
-x.png(fnm)
-print "fnm:",fnm
-print "src:",src
-ret = checkimage.check_result_image(fnm,src,checkimage.defaultThreshold)
-sys.exit(ret)
+regression.run(x, "test_lon_axes_freak_out.png")
\ No newline at end of file
diff --git a/testing/vcs/test_vcs_markers.py b/testing/vcs/test_vcs_markers.py
index 50f4f00d1bd63ce47d6eddce3fe14acc0a8fbb65..21b7a671bcb45304841718f4b029b9f1f37819d1 100644
--- a/testing/vcs/test_vcs_markers.py
+++ b/testing/vcs/test_vcs_markers.py
@@ -1,28 +1,14 @@
+import os, sys, numpy, cdms2, MV2, vcs, testing.regression as regression
 
-import vcs,numpy,cdms2,MV2,os,sys
-src=sys.argv[1]
-pth = os.path.join(os.path.dirname(__file__),"..")
-sys.path.append(pth)
-import checkimage
-
-x=vcs.init()
-x.drawlogooff()
-x.setbgoutputdimensions(1200,1091,units="pixels")
-
+x = regression.init()
 m = x.createmarker()
-m.x=[[0.,],[5,],[10.,],[15.]]
-m.y=[[0.,],[5,],[10.,],[15.]]
+m.x = [[0.,],[5,],[10.,],[15.]]
+m.y = [[0.,],[5,],[10.,],[15.]]
 m.worldcoordinate=[-5,20,-5,20]
+
 #m.worldcoordinate=[-10,10,0,10]
 m.type=['plus','diamond','square_fill',"hurricane"]
 m.color=[242,243,244,242]
 m.size=[20,20,20,5]
 x.plot(m,bg=1)
-fnm= "test_markers.png"
-
-x.png(fnm)
-
-print "fnm:",fnm
-print "src:",src
-ret = checkimage.check_result_image(fnm,src,checkimage.defaultThreshold)
-sys.exit(ret)
+regression.run(x, "test_markers.png")
\ No newline at end of file
diff --git a/testing/vcs/test_vcs_matplotlib_colormap.py b/testing/vcs/test_vcs_matplotlib_colormap.py
index 06b9f49b9b6fb71f2832bff44f5fae4b4f2b82d1..b7ba251f5bb58e02c76e5af504d8f6791d774e3a 100644
--- a/testing/vcs/test_vcs_matplotlib_colormap.py
+++ b/testing/vcs/test_vcs_matplotlib_colormap.py
@@ -1,7 +1,9 @@
-import cdms2
-import os
-import sys
-import vcs
+import os, sys, cdms2, vcs, testing.regression as regression
+import matplotlib
+# viridis needs matplotlib >= 1.5; skip the test on older versions
+sp = matplotlib.__version__.split(".")
+if int(sp[0]) * 10 + int(sp[1]) < 15:
+    sys.exit()
 
 # Load the clt data:
 dataFile = cdms2.open(os.path.join(vcs.sample_data, "clt.nc"))
@@ -10,24 +12,7 @@ clt = clt(latitude=(-90.0, 90.0), longitude=(-180., 175.), squeeze=1,
           time=('1979-1-1 0:0:0.0', '1988-12-1 0:0:0.0'))
 
 # Initialize canvas:
-canvas = vcs.init()
-canvas.setantialiasing(0)
-canvas.setbgoutputdimensions(1200,1091,units="pixels")
-canvas.drawlogooff()
-
+canvas = regression.init()
 canvas.setcolormap(vcs.matplotlib2vcs("viridis"))
-
 canvas.plot(clt, bg=1)
-
-# Load the image testing module:
-testingDir = os.path.join(os.path.dirname(__file__), "..")
-sys.path.append(testingDir)
-import checkimage
-
-# Create the test image and compare:
-baseline = sys.argv[1]
-testFile = "test_matplotlib_colormap.png"
-canvas.png(testFile)
-ret = checkimage.check_result_image(testFile, baseline,
-                                    checkimage.defaultThreshold)
-sys.exit(ret)
+regression.run(canvas, "test_matplotlib_colormap.png")
diff --git a/testing/vcs/test_vcs_mercator_edge.py b/testing/vcs/test_vcs_mercator_edge.py
index eb6d79cdb7147777b38c04b548e555b108ce7e07..31f6cb83bb144b463ebad914e4c3739547c928cd 100644
--- a/testing/vcs/test_vcs_mercator_edge.py
+++ b/testing/vcs/test_vcs_mercator_edge.py
@@ -1,24 +1,9 @@
-import vcs,cdms2
-import os,sys
+import os, sys, cdms2, vcs, testing.regression as regression
+
 f = cdms2.open(os.path.join(vcs.sample_data, "clt.nc"))
 s = f("clt")
-x = vcs.init()
-x.setantialiasing(0)
-x.drawlogooff()
-x.setbgoutputdimensions(1200,1091,units="pixels")
+x = regression.init()
 iso = x.createisofill()
 iso.projection = "mercator"
 x.plot(s(latitude=(-90, 90)), iso, bg=1)
-
-# Load the image testing module:
-testingDir = os.path.join(os.path.dirname(__file__), "..")
-sys.path.append(testingDir)
-import checkimage
-
-# Create the test image and compare:
-baseline = sys.argv[1]
-testFile = "test_vcs_mercator_edge.png"
-x.png(testFile)
-ret = checkimage.check_result_image(testFile, baseline,
-                                    checkimage.defaultThreshold)
-sys.exit(ret)
+regression.run(x, "test_vcs_mercator_edge.png")
\ No newline at end of file
diff --git a/testing/vcs/test_vcs_meshfill_draw_mesh.py b/testing/vcs/test_vcs_meshfill_draw_mesh.py
index ef214e6485b8c753e6d24655804d1258a5b39929..08801d7a6d7e8a4f30e0cb714965b6de02725f9f 100644
--- a/testing/vcs/test_vcs_meshfill_draw_mesh.py
+++ b/testing/vcs/test_vcs_meshfill_draw_mesh.py
@@ -1,23 +1,11 @@
-import vcs,cdms2,sys,os
-src=sys.argv[1]
-pth = os.path.join(os.path.dirname(__file__),"..")
-sys.path.append(pth)
-import checkimage
-x=vcs.init()
-x.setantialiasing(0)
-x.setbgoutputdimensions(1200,1091,units="pixels")
-x.drawlogooff()
-fnmcurv = os.path.join(vcs.sample_data,'sampleCurveGrid4.nc')
-f=cdms2.open(fnmcurv)
+import os, sys, cdms2, vcs, testing.regression as regression
 
-s=f("sample")
-m=x.createmeshfill()
-m.mesh=True
+x = regression.init()
+fnmcurv = os.path.join(vcs.sample_data,'sampleCurveGrid4.nc')
+f = cdms2.open(fnmcurv)
+s = f("sample")
+m = x.createmeshfill()
+m.mesh = True
 
 x.plot(s,m,bg=1)
-fnm = "test_meshfill_draw_mesh.png"
-x.png(fnm)
-print "fnm:",fnm
-print "src:",src
-ret = checkimage.check_result_image(fnm,src,checkimage.defaultThreshold)
-sys.exit(ret)
+regression.run(x, "test_meshfill_draw_mesh.png")
\ No newline at end of file
diff --git a/testing/vcs/test_vcs_meshfill_no_wrapping.py b/testing/vcs/test_vcs_meshfill_no_wrapping.py
index 967758c78651290b1541efb45aed7b6ffb99924c..9ee4a99f1c9ce4cc5f11d8db324ffaf0b2b087e9 100755
--- a/testing/vcs/test_vcs_meshfill_no_wrapping.py
+++ b/testing/vcs/test_vcs_meshfill_no_wrapping.py
@@ -1,23 +1,7 @@
-#!/usr/bin/env python
-import cdms2 
-import os 
-import sys
-import vcs
-
-pth = os.path.join(os.path.dirname(__file__), "..")
-sys.path.append(pth)
-import checkimage
-
-
-f=cdms2.open(sys.argv[2])
-h=f("heat")
-x=vcs.init()
-x.setantialiasing(0)
-x.drawlogooff()
-x.setbgoutputdimensions(1200, 900, units="pixels")
+import os, sys, cdms2, vcs, testing.regression as regression
 
+f = cdms2.open(sys.argv[2])
+h = f("heat")
+x = regression.init()
 x.plot(h, bg=1)
-fnm = "vcs_test_meshfill_no_wrapping.png"
-x.png(fnm)
-ret = checkimage.check_result_image(fnm, sys.argv[1], checkimage.defaultThreshold)
-sys.exit(ret)
+regression.run(x, "vcs_test_meshfill_no_wrapping.png")
\ No newline at end of file
diff --git a/testing/vcs/test_vcs_meshfill_regular_grid.py b/testing/vcs/test_vcs_meshfill_regular_grid.py
index 9a955f9fffaeb4182cf027ee4f1775f12281a045..77a390b221ebd19e4e876b0f4296d870cfd8f371 100644
--- a/testing/vcs/test_vcs_meshfill_regular_grid.py
+++ b/testing/vcs/test_vcs_meshfill_regular_grid.py
@@ -1,22 +1,7 @@
-import vcs, cdms2, os, sys
-src=sys.argv[1]
-pth = os.path.join(os.path.dirname(__file__),"..")
-sys.path.append(pth)
-import checkimage
-x=vcs.init()
-x.setantialiasing(0)
-x.drawlogooff()
+import os, sys, cdms2, vcs, testing.regression as regression
 
-x.setbgoutputdimensions(1200,1091,units="pixels")
-x = vcs.init()
-x.setantialiasing(0)
+x = regression.init()
 f = cdms2.open(os.path.join(vcs.sample_data, "clt.nc"))
 s = f("clt")
-x.meshfill(s,bg=1)
-fnm = "test_meshfill_regular_grid.png"
-x.png(fnm)
-
-print "fnm:",fnm
-print "src:",src
-ret = checkimage.check_result_image(fnm,src,checkimage.defaultThreshold)
-sys.exit(ret)
+x.meshfill(s, bg=1)
+regression.run(x, "test_meshfill_regular_grid.png")
\ No newline at end of file
diff --git a/testing/vcs/test_vcs_meshfill_vertices.py b/testing/vcs/test_vcs_meshfill_vertices.py
index 6317ef0719479a207aabc2ab50357cf0508b4680..103e681094dfc0e9179685d17b636e642d73d120 100644
--- a/testing/vcs/test_vcs_meshfill_vertices.py
+++ b/testing/vcs/test_vcs_meshfill_vertices.py
@@ -1,19 +1,8 @@
-import numpy
-import vcs
-import sys
-import os
-pth = os.path.join(os.path.dirname(__file__),"..")
-sys.path.append(pth)
-import checkimage
+import os, sys, numpy, vcs, testing.regression as regression
 
-x=vcs.init()
-
-x.setantialiasing(0)
-x.drawlogooff()
-x.setbgoutputdimensions(1200, 1090, units="pixels")
+x = regression.init()
 
 data_values = [ 25, 45, 55.]
-
 data_lon = [ 5., 10., 15.]
 data_lat = [ 5., 10., 15.]
 
@@ -50,8 +39,4 @@ m.levels = [20,30,50,70,80]
 m.mesh = True
 
 x.plot(numpy.array(data_values,),mesh,m,bg=True)
-x.png("test_vcs_meshfill_vertices_issue.png")
-src = sys.argv[1]
-ret = checkimage.check_result_image("test_vcs_meshfill_vertices_issue.png",
-                                    src, checkimage.defaultThreshold)
-sys.exit(ret)
+regression.run(x, "test_vcs_meshfill_vertices_issue.png")
\ No newline at end of file
diff --git a/testing/vcs/test_vcs_meshfill_zoom.py b/testing/vcs/test_vcs_meshfill_zoom.py
index 1026f0ff08d14585484e13d6ac2a2e712bc59451..c366d80100aa50e5011c2e6a6bce5958d8ce6386 100644
--- a/testing/vcs/test_vcs_meshfill_zoom.py
+++ b/testing/vcs/test_vcs_meshfill_zoom.py
@@ -1,14 +1,4 @@
-#!/usr/bin/env python
-import cdms2
-import os
-import sys
-import vcs
-
-# We test if gm.datawc zooms in correctly into the plot. This works only for
-# data using a linear projection. It does not work for geographic projections.
-pth = os.path.join(os.path.dirname(__file__), "..")
-sys.path.append(pth)
-import checkimage
+import os, sys, cdms2, vcs, testing.regression as regression
 
 flip = False
 if (len(sys.argv) == 3):
@@ -19,12 +9,10 @@ fileName = os.path.splitext(fileName)[0]
 if (flip):
     fileName = fileName + '_flip'
 fileName = fileName + '.png'
-f=cdms2.open(os.path.join(vcs.sample_data, "sampleCurveGrid4.nc"))
-s=f("sample")
-x=vcs.init()
-x.setantialiasing(0)
-x.drawlogooff()
-m=x.createmeshfill()
+f = cdms2.open(os.path.join(vcs.sample_data, "sampleCurveGrid4.nc"))
+s = f("sample")
+x = regression.init()
+m = x.createmeshfill()
 # m.mesh = True
 m.datawc_x1 = -20
 m.datawc_x2 = 20
@@ -33,7 +21,4 @@ if (flip):
 m.datawc_y1 = -20
 m.datawc_y2 = 20
 x.plot(s,m, bg=1)
-x.png(fileName)
-ret = checkimage.check_result_image(fileName, sys.argv[1], checkimage.defaultThreshold)
-sys.exit(ret)
-
+regression.run(x, fileName)
\ No newline at end of file
diff --git a/testing/vcs/test_vcs_mintics.py b/testing/vcs/test_vcs_mintics.py
index 01f65848b2cb2244e52468a5f8e6a33e1c60d5f6..302c22fd49698c9a18aec66ff2d752c16d237892 100644
--- a/testing/vcs/test_vcs_mintics.py
+++ b/testing/vcs/test_vcs_mintics.py
@@ -1,16 +1,10 @@
-import vcs,numpy,cdms2,MV2,os,sys
-src=sys.argv[1]
-pth = os.path.join(os.path.dirname(__file__),"..")
-sys.path.append(pth)
-import checkimage
+import os, sys, numpy, cdms2, MV2, vcs, testing.regression as regression
 
-x=vcs.init()
-x.setantialiasing(0)
-x.drawlogooff()
-x.setbgoutputdimensions(1200,1091,units="pixels")
+x = regression.init()
 f = cdms2.open(os.path.join(vcs.sample_data, "clt.nc"))
 s = f("clt")
 box = x.createboxfill()
+
 # Should ignore the string here
 box.xmtics1 = {i:"Test" for i in range(-180, 180, 15) if i % 30 != 0}
 box.ymtics1 = {i:"Test" for i in range(-90, 90, 5) if i % 10 != 0}
@@ -23,10 +17,4 @@ template.xmintic2.priority = 1
 template.xmintic2.y2 += template.xmintic1.y1 - template.xmintic1.y2
 template.ymintic2.priority = 1
 x.plot(s, template, box, bg=1)
-fnm = "test_vcs_mintics.png"
-x.png(fnm)
-
-print "fnm:",fnm
-print "src:",src
-ret = checkimage.check_result_image(fnm,src,checkimage.defaultThreshold)
-sys.exit(ret)
+regression.run(x, "test_vcs_mintics.png")
\ No newline at end of file
diff --git a/testing/vcs/test_vcs_missing_colorname.py b/testing/vcs/test_vcs_missing_colorname.py
index 1ed68b475072a84f902d4c379de7989e7a30333a..9b3db75458c629fa95e7aed04b46d62ea38dc35b 100644
--- a/testing/vcs/test_vcs_missing_colorname.py
+++ b/testing/vcs/test_vcs_missing_colorname.py
@@ -1,7 +1,4 @@
-import cdms2
-import os
-import sys
-import vcs
+import os, sys, cdms2, vcs, testing.regression as regression
 
 # Load the clt data:
 dataFile = cdms2.open(os.path.join(vcs.sample_data, "clt.nc"))
@@ -11,10 +8,7 @@ height, width = clt.shape
 clt.mask = [[True if i % 2 else False for i in range(width)] for _ in range(height)]
 
 # Initialize canvas:
-canvas = vcs.init()
-canvas.setantialiasing(0)
-canvas.setbgoutputdimensions(1200,1091,units="pixels")
-canvas.drawlogooff()
+canvas = regression.init()
 
 # Create and plot quick boxfill with default settings:
 # Only have to test boxfill because all 2D methods use the same code
@@ -25,16 +19,4 @@ boxfill = canvas.createboxfill()
 boxfill.missing = "Medium Aquamarine"
 
 canvas.plot(clt, boxfill, bg=1)
-
-# Load the image testing module:
-testingDir = os.path.join(os.path.dirname(__file__), "..")
-sys.path.append(testingDir)
-import checkimage
-
-# Create the test image and compare:
-baseline = sys.argv[1]
-testFile = "test_vcs_missing_colorname.png"
-canvas.png(testFile)
-ret = checkimage.check_result_image(testFile, baseline,
-                                    checkimage.defaultThreshold)
-sys.exit(ret)
+regression.run(canvas, "test_vcs_missing_colorname.png")
\ No newline at end of file
diff --git a/testing/vcs/test_vcs_monotonic_decreasing_yxvsx_default.py b/testing/vcs/test_vcs_monotonic_decreasing_yxvsx_default.py
index 9f30f40ae4f449f1098ef8eba2673f843639138c..4dcd059a028956165d0be07d4abdc5975f9b9254 100644
--- a/testing/vcs/test_vcs_monotonic_decreasing_yxvsx_default.py
+++ b/testing/vcs/test_vcs_monotonic_decreasing_yxvsx_default.py
@@ -1,27 +1,13 @@
-import vcs,numpy,cdms2,MV2,os,sys
-src=sys.argv[1]
-pth = os.path.join(os.path.dirname(__file__),"..")
-sys.path.append(pth)
-import checkimage
-x=vcs.init()
-x.setantialiasing(0)
+import os, sys, numpy, cdms2, MV2, vcs, testing.regression as regression
 
-x.setbgoutputdimensions(1200,1091,units="pixels")
-
-t=cdms2.createAxis(numpy.arange(120))
+x = regression.init()
+t = cdms2.createAxis(numpy.arange(120))
 t.designateTime()
-t.id="time"
-t.units="months since 2014"
+t.id = "time"
+t.units = "months since 2014"
 data = MV2.arange(120,0,-1)
-data.id="data"
+data.id = "data"
 data.setAxis(0,t)
-x=vcs.init()
-x.setantialiasing(0)
 x.plot(data,bg=1)
 fnm = 'test_vcs_monotonic_decreasing_yxvsx_default.png'
-
-x.png(fnm)
-print "fnm:",fnm
-print "src:",src
-ret = checkimage.check_result_image(fnm,src,checkimage.defaultThreshold)
-sys.exit(ret)
+regression.run(x, fnm)
\ No newline at end of file
diff --git a/testing/vcs/test_vcs_no_continents.py b/testing/vcs/test_vcs_no_continents.py
new file mode 100644
index 0000000000000000000000000000000000000000..a5c3e8d04e54af7d54c5b626a8255ba066486c3b
--- /dev/null
+++ b/testing/vcs/test_vcs_no_continents.py
@@ -0,0 +1,22 @@
+import os, sys, cdms2, vcs, testing.regression as regression
+
+# Load the clt data:
+dataFile = cdms2.open(os.path.join(vcs.sample_data, "clt.nc"))
+clt = dataFile("clt")
+clt = clt(latitude=(-90.0, 90.0), longitude=(-180., 175.), squeeze=1,
+          time=('1979-1-1 0:0:0.0', '1988-12-1 0:0:0.0'))
+
+# Initialize canvas:
+canvas = regression.init()
+
+t1 = vcs.createtemplate()
+t1.scale(.5, "y")
+t1.move(-.15, "y")
+t2 = vcs.createtemplate(source=t1.name)
+t2.move(.5, 'y')
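+# t2 is a copy of t1 offset in y so the two plots stack on one canvas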
+
+canvas.plot(clt, t1, continents=0, bg=True)
+canvas.plot(clt, t2, continents=1, bg=True)
+
+regression.run(canvas, "test_vcs_no_continents.png")
\ No newline at end of file
diff --git a/testing/vcs/test_vcs_oned_level_axis.py b/testing/vcs/test_vcs_oned_level_axis.py
index 25479a56b9519d7cf43576d57e94a4cf22cd92e6..ecb708c773379144272261ee5879bf5f2caed372 100644
--- a/testing/vcs/test_vcs_oned_level_axis.py
+++ b/testing/vcs/test_vcs_oned_level_axis.py
@@ -1,24 +1,9 @@
+import os, sys, vcs, cdms2, cdutil, testing.regression as regression
 
-import sys,cdutil
-import vcs
-import os
-import cdms2
-src = sys.argv[1]
-pth = os.path.join(os.path.dirname(__file__),"..")
-sys.path.append(pth)
-import checkimage
-
-f=cdms2.open(os.path.join(vcs.sample_data,"ta_ncep_87-6-88-4.nc"))
-ta=f("ta",time=slice(0,1),squeeze=1)
-ta=cdutil.averager(ta,axis="yx")
-x=vcs.init()
-x.setantialiasing(0)
-x.drawlogooff()
-x.setbgoutputdimensions(1200,1091,units="pixels")
+f = cdms2.open(os.path.join(vcs.sample_data,"ta_ncep_87-6-88-4.nc"))
+ta = f("ta",time=slice(0,1),squeeze=1)
+ta = cdutil.averager(ta,axis="yx")
+x = regression.init()
 x.plot(ta,bg=1)
 fnm = "test_oned_level_axis.png"
-x.png(fnm)
-print "fnm:",fnm
-print "src:",src
-ret = checkimage.check_result_image(fnm,src,checkimage.defaultThreshold)
-sys.exit(ret)
+regression.run(x, fnm)
\ No newline at end of file
diff --git a/testing/vcs/test_vcs_patterns.py b/testing/vcs/test_vcs_patterns.py
index fed4f41a839edb927d84cbee770227c7dd414684..c407f7cfee4e407f2efe8cd62d56dcc2e96e1d5e 100644
--- a/testing/vcs/test_vcs_patterns.py
+++ b/testing/vcs/test_vcs_patterns.py
@@ -1,19 +1,8 @@
-import cdms2
-import os
-import sys
-import vcs
-
-src = sys.argv[1]
-pth = os.path.join(os.path.dirname(__file__), "..")
-sys.path.append(pth)
-import checkimage
+import os, sys, vcs, cdms2, testing.regression as regression
 
 f = cdms2.open(vcs.sample_data+"/clt.nc")
 s = f("clt", time=slice(0, 1), squeeze=1)
-x = vcs.init()
-x.setantialiasing(0)
-x.drawlogooff()
-x.setbgoutputdimensions(1200, 1090, units="pixels")
+x = regression.init()
 iso = vcs.createisofill("isoleg")
 iso.levels = [0, 10, 20, 30, 40, 50, 60, 70, 80, 90, 100]
 iso.fillareastyle = "pattern"
@@ -21,9 +10,4 @@ iso.fillareacolors = vcs.getcolors([0, 10, 20, 30, 40, 50, 60, 70, 80, 90, 100])
 iso.fillareaindices = [1, 2, 4, 6, 8, 10, 12, 14, 16, 18, 20]
 x.plot(s, iso, bg=1)
 fnm = "test_vcs_patterns.png"
-x.png(fnm)
-
-print "fnm:", fnm
-print "src:", src
-ret = checkimage.check_result_image(fnm, src, checkimage.defaultThreshold+5.)
-sys.exit(ret)
+regression.run(x, fnm, threshold=regression.defaultThreshold+5.)
\ No newline at end of file
diff --git a/testing/vcs/test_vcs_plot_file_var.py b/testing/vcs/test_vcs_plot_file_var.py
index e20f947a672c70421c46be840d1d7d8d6c18c85c..0dd68945d9da5e83d527210d3ce6b8a4c88e69e5 100644
--- a/testing/vcs/test_vcs_plot_file_var.py
+++ b/testing/vcs/test_vcs_plot_file_var.py
@@ -1,8 +1,6 @@
-import vcs
-import os
-import sys
-import cdms2
-f=cdms2.open(os.path.join(vcs.sample_data,"clt.nc"))
-V=f("clt")
-x=vcs.init()
-x.plot(V,bg=1)
+import os, sys, vcs, cdms2
+
+f = cdms2.open(os.path.join(vcs.sample_data,"clt.nc"))
+V = f("clt")
+x = vcs.init()
+x.plot(V, bg=1)
diff --git a/testing/vcs/test_vcs_plot_unstructured_via_boxfill.py b/testing/vcs/test_vcs_plot_unstructured_via_boxfill.py
index 4aa1d24eba1647a001ad3d99324a8cfbd0303621..535ff432ecff39d4c1ce630b7d6da32643e0da14 100644
--- a/testing/vcs/test_vcs_plot_unstructured_via_boxfill.py
+++ b/testing/vcs/test_vcs_plot_unstructured_via_boxfill.py
@@ -1,21 +1,7 @@
-import vcs
-import os,sys
-import cdms2
-
-pth = os.path.join(os.path.dirname(__file__),"..")
-sys.path.append(pth)
-
-import checkimage
+import vcs, os, sys, cdms2, testing.regression as regression
 
 f = cdms2.open(os.path.join(vcs.sample_data,"sampleCurveGrid4.nc"))
 s = f("sample")
-x = vcs.init()
-x.setantialiasing(0)
-x.drawlogooff()
-x.setbgoutputdimensions(1200,1091,units="pixels")
+x = regression.init()
 x.plot(s,bg=1)
-fnm = "test_plot_unstructured_via_boxfill.png"
-src = sys.argv[1]
-x.png(fnm)
-ret = checkimage.check_result_image(fnm,src,checkimage.defaultThreshold)
-sys.exit(ret)
+regression.run(x, "test_plot_unstructured_via_boxfill.png")
diff --git a/testing/vcs/test_vcs_png_to_base64.py b/testing/vcs/test_vcs_png_to_base64.py
index 6b88b351141d33606af705469ecb44dca27776ad..f6d78e2fa9c6b37163e3841f8d3358d73ee8616b 100644
--- a/testing/vcs/test_vcs_png_to_base64.py
+++ b/testing/vcs/test_vcs_png_to_base64.py
@@ -1,4 +1,4 @@
-import vcs,numpy,cdms2,MV2,os,sys
+import vcs, numpy, cdms2, MV2, os, sys
 
 x = vcs.init()
 x.drawlogooff()
diff --git a/testing/vcs/test_vcs_png_window_resize.py b/testing/vcs/test_vcs_png_window_resize.py
index 0adeeb66ba8f10abfb5376e966a1e8d7a155796c..2adc55d15b533c07ae7eb46015ad6f6055bd5a0e 100644
--- a/testing/vcs/test_vcs_png_window_resize.py
+++ b/testing/vcs/test_vcs_png_window_resize.py
@@ -1,20 +1,9 @@
-import vcs
-import sys
-import os
+import vcs, sys, os, testing.regression as regression
 
-src=sys.argv[1]
-pth = os.path.join(os.path.dirname(__file__),"..")
-sys.path.append(pth)
-import checkimage
-x=vcs.init()
+x = regression.init(bg=0)
 x.setantialiasing(0)
 x.drawlogooff()
 x.open(814,628)
 x.plot([1,2,3,4,5,6,7])
 fnm = __file__[:-3]+".png"
-x.png(fnm)
-
-print "fnm:",fnm
-print "src:",src
-ret = checkimage.check_result_image(fnm,src,checkimage.defaultThreshold)
-sys.exit(ret)
+regression.run(x, fnm)
diff --git a/testing/vcs/test_vcs_polar_set_opt_param_polar.py b/testing/vcs/test_vcs_polar_set_opt_param_polar.py
index 8c508ca3c01aaa0000acce77fd7c5e7be7142a77..4e777fb2b3cdcd0e62b07e23525476990f4a86a9 100644
--- a/testing/vcs/test_vcs_polar_set_opt_param_polar.py
+++ b/testing/vcs/test_vcs_polar_set_opt_param_polar.py
@@ -1,29 +1,13 @@
+import vcs, cdms2, sys, os, testing.regression as regression
 
-import vcs
-import cdms2
-import sys
-import os
-
-src=sys.argv[1]
-pth = os.path.join(os.path.dirname(__file__),"..")
-sys.path.append(pth)
-import checkimage
-
-f=cdms2.open(os.path.join(vcs.sample_data,'clt.nc'))
-s=f("clt",slice(0,1),squeeze=1)
-x=vcs.init()
+f = cdms2.open(os.path.join(vcs.sample_data,'clt.nc'))
+s = f("clt",slice(0,1),squeeze=1)
+x = regression.init()
 x.setantialiasing(0)
 x.drawlogooff()
 x.setbgoutputdimensions(1200,1091,units="pixels")
-i=x.createisofill()
-p=x.getprojection("polar")
+i = x.createisofill()
+p = x.getprojection("polar")
 i.projection=p
 x.plot(s,i,bg=1)
-fnm= "test_polar_set_opt_param_polar.png"
-x.png(fnm)
-
-print "fnm:",fnm
-print "src:",src
-ret = checkimage.check_result_image(fnm,src,checkimage.defaultThreshold)
-sys.exit(ret)
-
+regression.run(x, "test_polar_set_opt_param_polar.png")
\ No newline at end of file
diff --git a/testing/vcs/test_vcs_read_old_scr.py b/testing/vcs/test_vcs_read_old_scr.py
index 0a61df61e5f2dd260d19671ec1d6168a904030c8..4ae04d8491ae15ab148417e087bb01e4c9f609b8 100644
--- a/testing/vcs/test_vcs_read_old_scr.py
+++ b/testing/vcs/test_vcs_read_old_scr.py
@@ -71,3 +71,4 @@ else:
     assert(gm.ymtics1=="lat5")
     assert(gm.fillareastyle == "solid")
     assert(gm.fillareacolors == [30, 29, 28, 27, 26, 25, 24, 23, 22, 21, 20, 19, 18, 17, 35, 36])
+sys.exit(0)
diff --git a/testing/vcs/test_vcs_remove_marker_none_1d.py b/testing/vcs/test_vcs_remove_marker_none_1d.py
index f868361b0f142bd72284ba77c2fdcdc5db5aab51..e112f86bc5a1d3405f803e82a313a567b158536c 100644
--- a/testing/vcs/test_vcs_remove_marker_none_1d.py
+++ b/testing/vcs/test_vcs_remove_marker_none_1d.py
@@ -3,31 +3,13 @@
 #
 # J-Y Peterschmitt - LSCE - 03/2015
 
-import vcs,numpy,cdms2,MV2,os,sys
-src=sys.argv[1]
-pth = os.path.join(os.path.dirname(__file__),"..")
-sys.path.append(pth)
-import checkimage
+import vcs, numpy, cdms2, MV2, os, sys, testing.regression as regression
 
 dummy_data = numpy.arange(50, dtype=numpy.float32)
-
-x = vcs.init()
-x.setantialiasing(0)
-x.drawlogooff()
-x.setbgoutputdimensions(1200,1091,units="pixels")
-
+x = regression.init()
 gm = x.createyxvsx('test_yxvsx')
 
-
-# Remove the marker
 gm.marker = None
-
 x.plot(gm, dummy_data,bg=1)
-
 fnm = "test_remove_marker_none_1d.png"
-x.png(fnm)
-print "fnm:",fnm
-print "src:",src
-ret = checkimage.check_result_image(fnm,src,checkimage.defaultThreshold)
-sys.exit(ret)
-# The end
+regression.run(x, fnm)
\ No newline at end of file
diff --git a/testing/vcs/test_vcs_setcolormap.py b/testing/vcs/test_vcs_setcolormap.py
index c3e63966007f7058f284e80f87af198a9bd87908..78d7fad817a3a1301ec4e67252987de51e5f2aa1 100644
--- a/testing/vcs/test_vcs_setcolormap.py
+++ b/testing/vcs/test_vcs_setcolormap.py
@@ -1,32 +1,12 @@
 
-import cdms2
-import os
-import sys
-import vcs
-
-baselineFilename = sys.argv[1]
-checkImagePath = os.path.join(os.path.dirname(__file__),"..")
-sys.path.append(checkImagePath)
-import checkimage
+import cdms2, os, sys, vcs, testing.regression as regression
 
 cdmsfile = cdms2.open(os.path.join(vcs.sample_data,"clt.nc"))
 data = cdmsfile('clt')
-
-x=vcs.init()
-x.setantialiasing(0)
-x.drawlogooff()
-x.setbgoutputdimensions(1200,1091,units="pixels")
-
+x = regression.init()
 t=x.gettemplate('default')
 x.plot(data, t, bg=True)
 
 # This should force the image to update
 x.setcolormap('bl_to_drkorang')
-
-testFilename = "test_vcs_setcolormap.png"
-x.png(testFilename)
-
-ret = checkimage.check_result_image(testFilename,
-                                    baselineFilename,
-                                    checkimage.defaultThreshold)
-sys.exit(ret)
+regression.run(x, "test_vcs_setcolormap.png")
\ No newline at end of file
diff --git a/testing/vcs/test_vcs_settings_color_name_rgba.py b/testing/vcs/test_vcs_settings_color_name_rgba.py
index 0b6aeef22f1a96f2a9dc87928301ede5b978900f..0fa6ec2884bbb5fcb3c9e876bd33dd1f4a9612cb 100644
--- a/testing/vcs/test_vcs_settings_color_name_rgba.py
+++ b/testing/vcs/test_vcs_settings_color_name_rgba.py
@@ -1,28 +1,13 @@
-import vcs
-import numpy
-import os
-import sys
-import cdms2
-pth = os.path.join(os.path.dirname(__file__),"..")
-sys.path.append(pth)
-import checkimage
+import vcs, numpy, os, sys, cdms2, testing.regression as regression
 
-x=vcs.init()
-x.setantialiasing(0)
-x.drawlogooff()
-x.setbgoutputdimensions(1200,1090,units="pixels")
+x = regression.init()
 
-f=cdms2.open(os.path.join(vcs.sample_data,"clt.nc"))
-data=f("clt",slice(0,1,))
+f = cdms2.open(os.path.join(vcs.sample_data,"clt.nc"))
+data = f("clt",slice(0,1,))
 gm = x.createisofill()
 gm.levels = range(0,110,10)
 gm.fillareacolors = ["green","red","blue","bisque","yellow","grey",
         [100,0,0,50], [0,100,0],"salmon",[0,0,100,75]]
 x.plot(data,gm,bg=True)
-fnm = 'test_vcs_settings_color_name_rgba_isofill.png'
-src = sys.argv[1]
-x.png(fnm)
-print "fnm:",fnm
-print "src:",src
-ret = checkimage.check_result_image(fnm,src,checkimage.defaultThreshold)
-sys.exit(ret)
+fnm = "test_vcs_settings_color_name_rgba_isofill.png"
+regression.run(x, fnm)
\ No newline at end of file
diff --git a/testing/vcs/test_vcs_settings_color_name_rgba_1d.py b/testing/vcs/test_vcs_settings_color_name_rgba_1d.py
index 0fe844c2031c1b8ebad63c83e4e6798d36c1c7e2..8bca782a6816fe4919b667a77c6a794472c1b120 100644
--- a/testing/vcs/test_vcs_settings_color_name_rgba_1d.py
+++ b/testing/vcs/test_vcs_settings_color_name_rgba_1d.py
@@ -1,16 +1,6 @@
-import vcs
-import numpy
-import os
-import sys
-import cdms2
-pth = os.path.join(os.path.dirname(__file__),"..")
-sys.path.append(pth)
-import checkimage
+import vcs, numpy, os, sys, cdms2, testing.regression as regression
 
-x=vcs.init()
-x.setantialiasing(0)
-x.drawlogooff()
-x.setbgoutputdimensions(1200,1090,units="pixels")
+x = regression.init()
 
 f=cdms2.open(os.path.join(vcs.sample_data,"clt.nc"))
 data=f("clt")[:,5,8]
@@ -18,10 +8,4 @@ gm = x.create1d()
 gm.linecolor="salmon"
 gm.markercolor = [0,0,100]
 x.plot(data,gm,bg=True)
-fnm = 'test_vcs_settings_color_name_rgba_1d.png'
-src = sys.argv[1]
-x.png(fnm)
-print "fnm:",fnm
-print "src:",src
-ret = checkimage.check_result_image(fnm,src,checkimage.defaultThreshold)
-sys.exit(ret)
+regression.run(x, 'test_vcs_settings_color_name_rgba_1d.png')
\ No newline at end of file
diff --git a/testing/vcs/test_vcs_settings_color_name_rgba_boxfill.py b/testing/vcs/test_vcs_settings_color_name_rgba_boxfill.py
index 043d331c8a5ab4e88f7791158e9c5809a3e09eca..34228513e5f68b2bf5adfb2406eee24b4c082780 100644
--- a/testing/vcs/test_vcs_settings_color_name_rgba_boxfill.py
+++ b/testing/vcs/test_vcs_settings_color_name_rgba_boxfill.py
@@ -1,17 +1,6 @@
-import vcs
-import numpy
-import os
-import sys
-import cdms2
-pth = os.path.join(os.path.dirname(__file__),"..")
-sys.path.append(pth)
-import checkimage
-
-x=vcs.init()
-x.setantialiasing(0)
-x.drawlogooff()
-x.setbgoutputdimensions(1200,1090,units="pixels")
+import vcs, numpy, os, sys, cdms2, testing.regression as regression
 
+x = regression.init()
 f=cdms2.open(os.path.join(vcs.sample_data,"clt.nc"))
 data=f("clt",slice(0,1,))
 gm = x.createboxfill()
@@ -20,10 +9,4 @@ gm.levels = range(0,110,10)
 gm.fillareacolors = ["green","red","blue","bisque","yellow","grey",
         [100,0,0,50], [0,100,0],"salmon",[0,0,100,75]]
 x.plot(data,gm,bg=True)
-fnm = 'test_vcs_settings_color_name_rgba_boxfill.png'
-src = sys.argv[1]
-x.png(fnm)
-print "fnm:",fnm
-print "src:",src
-ret = checkimage.check_result_image(fnm,src,checkimage.defaultThreshold)
-sys.exit(ret)
+regression.run(x, 'test_vcs_settings_color_name_rgba_boxfill.png')
\ No newline at end of file
diff --git a/testing/vcs/test_vcs_settings_color_name_rgba_isoline.py b/testing/vcs/test_vcs_settings_color_name_rgba_isoline.py
index 200293dbee85bb1594002c7cb50a82f41b0da0e6..c23edc2a6f4dbdc63693782e0eaba400b43ca067 100644
--- a/testing/vcs/test_vcs_settings_color_name_rgba_isoline.py
+++ b/testing/vcs/test_vcs_settings_color_name_rgba_isoline.py
@@ -1,17 +1,6 @@
-import vcs
-import numpy
-import os
-import sys
-import cdms2
-pth = os.path.join(os.path.dirname(__file__),"..")
-sys.path.append(pth)
-import checkimage
-
-x=vcs.init()
-x.setantialiasing(0)
-x.drawlogooff()
-x.setbgoutputdimensions(1200,1090,units="pixels")
+import vcs, numpy, os, sys, cdms2, testing.regression as regression
 
+x = regression.init()
 f=cdms2.open(os.path.join(vcs.sample_data,"clt.nc"))
 data=f("clt",slice(0,1,))
 gm = x.createisoline()
@@ -19,10 +8,4 @@ gm.levels = range(0,110,10)
 gm.linecolors = ["green","red","blue","bisque","yellow","grey",
         [100,0,0,50], [0,100,0],"salmon",[0,0,100,75]]
 x.plot(data,gm,bg=True)
-fnm = 'test_vcs_settings_color_name_rgba_isoline.png'
-src = sys.argv[1]
-x.png(fnm)
-print "fnm:",fnm
-print "src:",src
-ret = checkimage.check_result_image(fnm,src,checkimage.defaultThreshold)
-sys.exit(ret)
+regression.run(x, 'test_vcs_settings_color_name_rgba_isoline.png')
\ No newline at end of file
diff --git a/testing/vcs/test_vcs_settings_color_name_rgba_meshfill.py b/testing/vcs/test_vcs_settings_color_name_rgba_meshfill.py
index 4a0858d1d1168f3755f7526bbfe320b38c27d8c5..0b3ffc8716219259a850d9237dde6c41b96d3a95 100644
--- a/testing/vcs/test_vcs_settings_color_name_rgba_meshfill.py
+++ b/testing/vcs/test_vcs_settings_color_name_rgba_meshfill.py
@@ -1,17 +1,6 @@
-import vcs
-import numpy
-import os
-import sys
-import cdms2
-pth = os.path.join(os.path.dirname(__file__),"..")
-sys.path.append(pth)
-import checkimage
-
-x=vcs.init()
-x.setantialiasing(0)
-x.drawlogooff()
-x.setbgoutputdimensions(1200,1090,units="pixels")
+import vcs, numpy, os, sys, cdms2, testing.regression as regression
 
+x = regression.init()
 f=cdms2.open(os.path.join(vcs.sample_data,"sampleCurveGrid4.nc"))
 data=f("sample")
 gm = x.createmeshfill()
@@ -19,10 +8,4 @@ gm.levels = range(0,1501,150)
 gm.fillareacolors = ["green","red","blue","bisque","yellow","grey",
         [100,0,0,50], [0,100,0],"salmon",[0,0,100,75]]
 x.plot(data,gm,bg=True)
-fnm = 'test_vcs_settings_color_name_rgba_meshfill.png'
-src = sys.argv[1]
-x.png(fnm)
-print "fnm:",fnm
-print "src:",src
-ret = checkimage.check_result_image(fnm,src,checkimage.defaultThreshold)
-sys.exit(ret)
+regression.run(x, 'test_vcs_settings_color_name_rgba_meshfill.png')
\ No newline at end of file
diff --git a/testing/vcs/test_vcs_star_triangle_markers.py b/testing/vcs/test_vcs_star_triangle_markers.py
index 4564e4059cacd207586f3d2eb13a5b633f547b5a..2d75e05d06bdd5bbf7d0a5995029fc2d5c028e98 100644
--- a/testing/vcs/test_vcs_star_triangle_markers.py
+++ b/testing/vcs/test_vcs_star_triangle_markers.py
@@ -1,9 +1,5 @@
 
-import vcs,numpy,cdms2,MV2,os,sys
-src=sys.argv[1]
-pth = os.path.join(os.path.dirname(__file__),"..")
-sys.path.append(pth)
-import checkimage
+import vcs, numpy, cdms2, MV2, os, sys, testing.regression as regression
 
 x=vcs.init()
 x.drawlogooff()
@@ -16,12 +12,5 @@ m.x = [[.1], [.3], [.5], [.7], [.9]]
 m.y = [[.1], [.3], [.5], [.7], [.9]]
 m.color = [200, 150, 160, 175, 125]
 m.size = [50, 50, 50, 50, 50]
-x.plot(m,bg=1)
-fnm = "test_star_triangle_markers.png"
-x.png(fnm)
-
-print "fnm:",fnm
-print "src:",src
-
-ret = checkimage.check_result_image(fnm,src,checkimage.defaultThreshold)
-sys.exit(ret)
+x.plot(m, bg=1)
+regression.run(x, "test_star_triangle_markers.png")
\ No newline at end of file
diff --git a/testing/vcs/test_vcs_taylor_2quads.py b/testing/vcs/test_vcs_taylor_2quads.py
index 3458acb911997648b82e31a0e498f67fcdef0b16..84f1026156bae58d1210cd05e45a8d334482e862 100644
--- a/testing/vcs/test_vcs_taylor_2quads.py
+++ b/testing/vcs/test_vcs_taylor_2quads.py
@@ -1,17 +1,6 @@
 
-import sys,os
-src = sys.argv[1]
-pth = os.path.join(os.path.dirname(__file__),"..")
-sys.path.append(pth)
-import checkimage
-import vcs, MV2
+import sys, os, vcs, MV2, testing.regression as regression
 
-
-bg=True
-
-#
-# First create some sample data
-#
 data = MV2.array([[-0.50428531,-0.8505522 ,],
  [ 0.70056821,-0.27235352,],
  [ 0.05106154, 0.23012322,],
@@ -19,20 +8,8 @@ data = MV2.array([[-0.50428531,-0.8505522 ,],
  [ 0.85760801,-0.08336641,],
  [ 1.14083397,-0.78326507,]])
 
-x=vcs.init()
-x.setantialiasing(0)
-x.drawlogooff()
-
-if bg:
-  x.setbgoutputdimensions(1200,1091,units="pixels")
-
-td=x.createtaylordiagram('new')
-
+x = regression.init()
+td = x.createtaylordiagram('new')
 td.quadrans = 2
-x.plot(data,td,skill = td.defaultSkillFunction,bg=bg)
-fnm = "test_vcs_taylor_2quads.png"
-x.png(fnm)
-ret = checkimage.check_result_image(fnm,src,checkimage.defaultThreshold)
-if not bg:
-    raw_input("Press Enter")
-sys.exit(ret)
+x.plot(data, td, skill=td.defaultSkillFunction, bg=1)
+regression.run(x, "test_vcs_taylor_2quads.png")
diff --git a/testing/vcs/test_vcs_taylor_template_ctl.py b/testing/vcs/test_vcs_taylor_template_ctl.py
index 40b78f0dce11fd8263b0169ba6e83234e5265dfa..b6f610c7dabbcb21bfeb8c0d6bb2551dea0f04f5 100644
--- a/testing/vcs/test_vcs_taylor_template_ctl.py
+++ b/testing/vcs/test_vcs_taylor_template_ctl.py
@@ -1,20 +1,8 @@
 
-import sys,os
-src = sys.argv[1]
-pth = os.path.join(os.path.dirname(__file__),"..")
-sys.path.append(pth)
-import checkimage
-import vcs,MV2
+import sys, os, vcs, MV2
+import testing.regression as regression
 
-bg=True
-
-x=vcs.init()
-x.setantialiasing(0)
-x.drawlogooff()
-if bg:
-  x.setbgoutputdimensions(1200,1091,units="pixels")
-if not bg:
-    x.open()
+x = regression.init()
 
 ## Create a template from the default taylor diagram
 t=x.createtemplate('mytaylor','deftaylor')
@@ -61,12 +49,5 @@ t.xmintic2.priority=1
 # Create some dummy data for display purposes
 data=MV2.array([[1.52,.52,],[.83,.84]])
 
-x.plot(data,t,td,bg=bg)
-fnm="test_vcs_taylor_template_ctl.png"
-x.png(fnm)
-print "fnm:",fnm
-print "src:",src
-ret = checkimage.check_result_image(fnm,src,checkimage.defaultThreshold)
-if not bg:
-    raw_input("Press Enter")
-sys.exit(ret)
+x.plot(data, t, td, bg=1)
+regression.run(x, "test_vcs_taylor_template_ctl.png")
diff --git a/testing/vcs/test_vcs_textextents.py b/testing/vcs/test_vcs_textextents.py
new file mode 100644
index 0000000000000000000000000000000000000000..a26aa722bca782e6b3eced5433fffbcec5847038
--- /dev/null
+++ b/testing/vcs/test_vcs_textextents.py
@@ -0,0 +1,22 @@
+import os, sys, numpy, cdms2, MV2, vcs, testing.regression as regression
+
+# We have to specify the geometry to make sure that the size of the canvas doesn't change between the init and the plot functions
+x = regression.init(bg=True, geometry=(1200,1091))
+text = x.createtext()
+text.string = ["A very very very very long string", "A\nmult-line\nstring", "Short"]
+# Use any value for initial; then we'll manually "right align" using the text extents
+text.x = [.1]
+text.y = [.1, .5, .9]
+
+# This function only gets the extents for the *current* size
+extents = x.gettextextent(text)
+# Now we'll manually populate this with the desired values
+text.x = []
+for min_x, max_x, min_y, max_y in extents:
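+    # The extents appear to be normalized [0, 1] canvas coordinates, so 1 - w right-aligns each string at x = 1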
+    w = max_x - min_x
+    # the text height (max_y - min_y) is not needed for right-alignment
+    text.x.append(1 - w)
+
+x.plot(text, bg=1)
+regression.run(x, "test_textextents.png")
diff --git a/testing/vcs/test_vcs_user_passed_date.py b/testing/vcs/test_vcs_user_passed_date.py
index 643d2019cdc4de33839112b8fe4b396413eb0736..bb5b18a0d3c481b012687580a01e5d6d26c8a6c8 100644
--- a/testing/vcs/test_vcs_user_passed_date.py
+++ b/testing/vcs/test_vcs_user_passed_date.py
@@ -1,18 +1,9 @@
 import vcs,cdms2,os,sys,cdtime
-src=sys.argv[1]
-pth = os.path.join(os.path.dirname(__file__),"..")
-sys.path.append(pth)
-import checkimage
+import testing.regression as regression
+
 f=cdms2.open(os.path.join(vcs.sample_data,"clt.nc"))
 s=f("clt",squeeze=1)
-x=vcs.init()
-x.setantialiasing(0)
-x.drawlogooff()
-x.setbgoutputdimensions(1200,1091,units="pixels")
+x=regression.init()
 x.plot(s,bg=1,time=cdtime.comptime(2015))
 fnm = os.path.split(__file__)[1][:-3]+".png"
-x.png(fnm)
-print "fnm:",fnm
-print "src:",src
-ret = checkimage.check_result_image(fnm,src,checkimage.defaultThreshold)
-sys.exit(ret)
+regression.run(x, fnm)
diff --git a/testing/vcs/test_vcs_user_passed_date_as_string.py b/testing/vcs/test_vcs_user_passed_date_as_string.py
index cd5d777f3db8d1a9188720a7e2c71f293d7db329..e9bdf83e62ae7f358791db59ef48a55e38c2e192 100644
--- a/testing/vcs/test_vcs_user_passed_date_as_string.py
+++ b/testing/vcs/test_vcs_user_passed_date_as_string.py
@@ -1,18 +1,11 @@
 import vcs,cdms2,os,sys,cdtime
-src=sys.argv[1]
-pth = os.path.join(os.path.dirname(__file__),"..")
-sys.path.append(pth)
-import checkimage
+import testing.regression as regression
+
+x = regression.init()
+
 f=cdms2.open(os.path.join(vcs.sample_data,"clt.nc"))
 s=f("clt",squeeze=1)
-x=vcs.init()
-x.setantialiasing(0)
-x.drawlogooff()
-x.setbgoutputdimensions(1200,1091,units="pixels")
 x.plot(s,bg=1,time='2015-02-23')
 fnm = os.path.split(__file__)[1][:-3]+".png"
 x.png(fnm)
-print "fnm:",fnm
-print "src:",src
-ret = checkimage.check_result_image(fnm,src,checkimage.defaultThreshold)
-sys.exit(ret)
+regression.run(x, fnm)
diff --git a/testing/vcs/test_vcs_vectors_missing.py b/testing/vcs/test_vcs_vectors_missing.py
index fd53c97e11e8c31b6e91597fbe215b3c82375cbd..5e8b08e5be9357e50caaa2a10c5eef3a8c208204 100644
--- a/testing/vcs/test_vcs_vectors_missing.py
+++ b/testing/vcs/test_vcs_vectors_missing.py
@@ -1,6 +1,6 @@
 
-import sys,os
-import argparse
+import sys, os, argparse
+import vcs, cdms2, vtk, MV2, numpy, testing.regression as regression
 
 p = argparse.ArgumentParser(description="Basic gm testing code for vcs")
 p.add_argument("--source", dest="src", help="source image file")
@@ -14,28 +14,17 @@ if not args.show:
   src = args.src
   pth = os.path.join(os.path.dirname(__file__),"..")
   sys.path.append(pth)
-  import checkimage
-
-import vcs
-import sys
-import cdms2
-import vtk
-import os
-import MV2
-import numpy
-
 
 bg = not args.show
-
-x=vcs.init()
+x = vcs.init()
 x.setantialiasing(0)
 x.drawlogooff()
 if bg:
   x.setbgoutputdimensions(1200,1091,units="pixels")
 x.setcolormap("rainbow")
-gm=vcs.createvector()
+gm = vcs.createvector()
 gm.scale = args.scale
-nm_xtra=""
+nm_xtra = ""
 xtra = {}
 import cdms2
 import os
@@ -52,14 +41,12 @@ if args.show:
   pass
   #x.interact()
 else:
-  fnm = "test_vcs_vectors_missing" 
+  fnm = "test_vcs_vectors_missing"
   if args.scale!=1.:
     fnm+="_%.1g" % args.scale
   fnm+=nm_xtra
   x.png(fnm)
-  print "fnm:",fnm
-  print "src:",src
-  ret = checkimage.check_result_image(fnm+'.png',src,checkimage.defaultThreshold, cleanup=not args.keep)
+  ret = regression.check_result_image(fnm+'.png', src, regression.defaultThreshold, cleanup=not args.keep)
 if args.show:
     raw_input("Press Enter")
 sys.exit(ret)
diff --git a/testing/vcs/test_vcs_vectors_robinson.py b/testing/vcs/test_vcs_vectors_robinson.py
index 49052c679f9a0ca35a03fc03a63624d2d6ed8ac1..5cde92c82cd5e20dee9e2cc5493b75496554119a 100644
--- a/testing/vcs/test_vcs_vectors_robinson.py
+++ b/testing/vcs/test_vcs_vectors_robinson.py
@@ -1,13 +1,7 @@
 import vcs, cdms2, numpy, os, sys
-src = sys.argv[1]
-pth = os.path.join(os.path.dirname(__file__),"..")
-sys.path.append(pth)
-import checkimage
+import testing.regression as regression
 
-x = vcs.init()
-x.setantialiasing(0)
-x.drawlogooff()
-x.setbgoutputdimensions(1200, 1091, units="pixels")
+x = regression.init()
 f = cdms2.open(os.path.join(vcs.sample_data, "clt.nc"))
 u = f("u")
 v = f("v")
@@ -16,8 +10,4 @@ p = x.createprojection()
 p.type = "robinson"
 V.projection = p
 x.plot(u,v,V, bg=1)
-
-fnm = "test_vcs_vectors_robinson.png"
-x.png(fnm)
-ret = checkimage.check_result_image(fnm, src, checkimage.defaultThreshold)
-sys.exit(ret)
+regression.run(x, "test_vcs_vectors_robinson.png")
diff --git a/testing/vcs/test_vcs_vectors_robinson_wrap.py b/testing/vcs/test_vcs_vectors_robinson_wrap.py
index 86491cc2f14da27a53e044538ded62881b9accaa..3cb30f06ffc29a5c90844cf450552830361f050f 100644
--- a/testing/vcs/test_vcs_vectors_robinson_wrap.py
+++ b/testing/vcs/test_vcs_vectors_robinson_wrap.py
@@ -1,13 +1,7 @@
 import vcs, cdms2, numpy, os, sys
-src = sys.argv[1]
-pth = os.path.join(os.path.dirname(__file__),"..")
-sys.path.append(pth)
-import checkimage
+import testing.regression as regression
 
-x = vcs.init()
-x.setantialiasing(0)
-x.drawlogooff()
-x.setbgoutputdimensions(1200, 1091, units="pixels")
+x = regression.init()
 f = cdms2.open(os.path.join(vcs.sample_data, "clt.nc"))
 lon1 = -180
 u = f("clt")
@@ -19,8 +13,4 @@ p = x.createprojection()
 p.type = "robinson"
 V.projection = p
 x.plot(u,v,V, bg=1)
-
-fnm = "test_vcs_vectors_robinson_wrap.png"
-x.png(fnm)
-ret = checkimage.check_result_image(fnm, src, checkimage.defaultThreshold)
-sys.exit(ret)
+regression.run(x, "test_vcs_vectors_robinson_wrap.png")
diff --git a/testing/vcs/test_vcs_vectors_scale_options.py b/testing/vcs/test_vcs_vectors_scale_options.py
new file mode 100644
index 0000000000000000000000000000000000000000..32898d129f8ac8d5b82678c67466a35c7d7863e6
--- /dev/null
+++ b/testing/vcs/test_vcs_vectors_scale_options.py
@@ -0,0 +1,61 @@
+import sys, cdms2, vcs, testing.regression as regression
+
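+# Each scaletype below is rendered and compared against its own baseline image
+# (sys.argv[1] through sys.argv[6]); nonzero return codes accumulate in ret.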
+data = cdms2.open(vcs.sample_data+"/clt.nc")
+v = data['v'][...,::10,::10]
+u = data['u'][...,::10,::10]
+
+canvas = regression.init()
+gv = vcs.createvector()
+
+gv.scaletype = 'off'
+canvas.plot(u, v, gv)
+outFilename = 'test_vcs_vectors_scale_options_off.png'
+canvas.png(outFilename)
+ret = regression.check_result_image(outFilename, sys.argv[1])
+canvas.clear()
+
+v = data['v'][...,::4,::4]
+u = data['u'][...,::4,::4]
+gv.scaletype = 'constant'
+gv.scale = 0.1
+canvas.plot(u, v, gv)
+outFilename = 'test_vcs_vectors_scale_options_constant.png'
+canvas.png(outFilename)
+ret += regression.check_result_image(outFilename, sys.argv[2])
+canvas.clear()
+
+v = data['v']
+u = data['u']
+gv.scale = 1.0
+
+gv.scaletype = 'linear'
+canvas.plot(u, v, gv)
+outFilename = 'test_vcs_vectors_scale_options_linear.png'
+canvas.png(outFilename)
+ret += regression.check_result_image(outFilename, sys.argv[3])
+canvas.clear()
+
+gv.scaletype = 'normalize'
+canvas.plot(u, v, gv)
+outFilename = 'test_vcs_vectors_scale_options_normalize.png'
+canvas.png(outFilename)
+ret += regression.check_result_image(outFilename, sys.argv[4])
+canvas.clear()
+
+gv.scaletype = 'constantNLinear'
+canvas.plot(u, v, gv)
+outFilename = 'test_vcs_vectors_scale_options_constantNLinear.png'
+canvas.png(outFilename)
+ret += regression.check_result_image(outFilename, sys.argv[5])
+canvas.clear()
+
+gv.scaletype = 'constantNNormalize'
+canvas.plot(u, v, gv)
+outFilename = 'test_vcs_vectors_scale_options_constantNNormalize.png'
+canvas.png(outFilename)
+ret += regression.check_result_image(outFilename, sys.argv[6])
+canvas.clear()
+
+sys.exit(ret)
diff --git a/testing/vcs/test_vcs_verify_boxfill_basics.py b/testing/vcs/test_vcs_verify_boxfill_basics.py
index 7d08a2b75f3c443b91fc3f80300a2703ff071c70..0768ff9f918b9b1b8f2d0191fe8b23a71a2f7992 100644
--- a/testing/vcs/test_vcs_verify_boxfill_basics.py
+++ b/testing/vcs/test_vcs_verify_boxfill_basics.py
@@ -1,9 +1,5 @@
-
-import vcs
-import numpy
-import cdtime
-
-from vcs_test_common import *
+import numpy, cdtime, vcs
+from testing.common import test_values_setting
 
 x=vcs.init()
 x.drawlogooff()
@@ -12,12 +8,12 @@ b=x.createboxfill()
 assert(b.projection == "linear")
 assert(b.xticlabels1 == "*")
 assert(b.xticlabels2 == "*")
-assert(b.xmtics1 == "") 
+assert(b.xmtics1 == "")
 assert(b.xmtics2 == "")
 assert(b.yticlabels1 == "*")
 assert(b.yticlabels2 == "*")
-assert(b.ymtics1 == "")  
-assert(b.ymtics2 == "")  
+assert(b.ymtics1 == "")
+assert(b.ymtics2 == "")
 assert(numpy.allclose(b.datawc_x1, 1e+20))
 assert(numpy.allclose(b.datawc_x2, 1e+20))
 assert(numpy.allclose(b.datawc_y1, 1e+20))
@@ -71,12 +67,12 @@ assert(b.name == "test_b_ok")
 assert(b.projection == "test_bfill")
 assert(b.xticlabels1 == {23:"Hi"})
 assert(b.xticlabels2 == {23:"Hi"})
-assert(b.xmtics1 == {23:"Hi"}) 
+assert(b.xmtics1 == {23:"Hi"})
 assert(b.xmtics2 == {23:"Hi"})
 assert(b.yticlabels1 == {23:"Hi"})
 assert(b.yticlabels2 == {23:"Hi"})
-assert(b.ymtics1 == {23:"Hi"})  
-assert(b.ymtics2 == {23:"Hi"})  
+assert(b.ymtics1 == {23:"Hi"})
+assert(b.ymtics2 == {23:"Hi"})
 assert(numpy.allclose(b.datawc_x1, 56.7))
 assert(numpy.allclose(b.datawc_x2, 56.7))
 assert(numpy.allclose(b.datawc_y1, 56.7))
diff --git a/testing/vcs/test_vcs_verify_proj_basics.py b/testing/vcs/test_vcs_verify_proj_basics.py
index f1e1fa0a90590cfa430e3d6d66c7621fb9b5dcd9..822a6fc0ba82b47ed4029607768f8cc120c16785 100644
--- a/testing/vcs/test_vcs_verify_proj_basics.py
+++ b/testing/vcs/test_vcs_verify_proj_basics.py
@@ -1,9 +1,5 @@
-
-import vcs
-import numpy
-import cdtime
-
-from vcs_test_common import *
+import numpy, cdtime, vcs
+from testing.common import test_values_setting
 
 x=vcs.init()
 x.drawlogooff()
diff --git a/testing/vcs/test_vcs_wmo_marker.py b/testing/vcs/test_vcs_wmo_marker.py
index b4478372f0528b46026ed19508207ef1058b9c25..854e4a9beea56171f0d1b2ec44ce1d08de31742c 100644
--- a/testing/vcs/test_vcs_wmo_marker.py
+++ b/testing/vcs/test_vcs_wmo_marker.py
@@ -1,14 +1,10 @@
 
 import vcs,numpy,cdms2,MV2,os,sys
-src=sys.argv[1]
-pth = os.path.join(os.path.dirname(__file__),"..")
-sys.path.append(pth)
-import checkimage
 
-x=vcs.init()
-x.setantialiasing(0)
-x.drawlogooff()
-x.setbgoutputdimensions(1200,1091,units="pixels")
+
+import testing.regression as regression
+x = regression.init()
+
 
 m = x.createmarker()
 M=1
@@ -22,8 +18,5 @@ x.plot(m,bg=1)
 fnm = 'wmo_marker.png'
 x.png(fnm)
 
-print "fnm:",fnm
-print "src:",src
-ret = checkimage.check_result_image(fnm,src,checkimage.defaultThreshold)
-sys.exit(ret)
+regression.run(x, "wmo_marker.png")
 
diff --git a/testing/vcs/test_vcs_wmo_markers.py b/testing/vcs/test_vcs_wmo_markers.py
index 5785e0925526d90ee090d9281e72d6114d3d0d06..5162eeeee5ad9834d655a103989f1e91d7113d5d 100644
--- a/testing/vcs/test_vcs_wmo_markers.py
+++ b/testing/vcs/test_vcs_wmo_markers.py
@@ -1,26 +1,20 @@
 
 import vcs,numpy,cdms2,MV2,os,sys
-src=sys.argv[1]
-pth = os.path.join(os.path.dirname(__file__),"..")
-sys.path.append(pth)
-import checkimage
+import testing.regression as regression
 
 wmo = ['w00', 'w01', 'w02', 'w03', 'w04', 'w05', 'w06', 'w07', 'w08', 'w09',
        'w10', 'w11', 'w12', 'w13', 'w14', 'w15', 'w16', 'w17', 'w18', 'w19',
-       'w20', 'w21', 'w22', 'w23', 'w24', 'w25', 'w26', 'w27', 'w28', 'w29', 
+       'w20', 'w21', 'w22', 'w23', 'w24', 'w25', 'w26', 'w27', 'w28', 'w29',
        'w30', 'w31', 'w32', 'w33', 'w34', 'w35', 'w36', 'w37', 'w38', 'w39',
        'w40', 'w41', 'w42', 'w43', 'w44', 'w45', 'w46', 'w47', 'w48', 'w49',
        'w50', 'w51', 'w52', 'w53', 'w54', 'w55', 'w56', 'w57', 'w58', 'w59',
-       'w60', 'w61', 'w62', 'w63', 'w64', 'w65', 'w66', 'w67', 'w68', 'w69', 
+       'w60', 'w61', 'w62', 'w63', 'w64', 'w65', 'w66', 'w67', 'w68', 'w69',
        'w70', 'w71', 'w72', 'w73', 'w74', 'w75', 'w76', 'w77', 'w78', 'w79',
        'w80', 'w81', 'w82', 'w83', 'w84', 'w85', 'w86', 'w87', 'w88', 'w89',
        'w90', 'w91', 'w92', 'w93', 'w94', 'w95', 'w96', 'w97', 'w98', 'w99',
        'w100', 'w101', 'w102']
 
-x=vcs.init()
-x.setantialiasing(0)
-x.drawlogooff()
-x.setbgoutputdimensions(1200,1091,units="pixels")
+x = regression.init()
 
 m = x.createmarker()
 M=7
@@ -37,13 +31,6 @@ for Y in range(7):
 m.x = xs
 m.y = ys
 m.list()
-x.plot(m,bg=1)
-fnm = "wmo_markers.png"
-x.png(fnm)
-
-
-print "fnm:",fnm
-print "src:",src
-ret = checkimage.check_result_image(fnm,src,checkimage.defaultThreshold)
-sys.exit(ret)
+x.plot(m, bg=1)
+regression.run(x, "wmo_markers.png");
 
diff --git a/testing/vcs/vtk_ui/vtk_ui_test.py b/testing/vcs/vtk_ui/vtk_ui_test.py
index facfd7f45123d944454984984cf9f9a50762d0fd..d5cbe18bfcb2ee7166ae12e294f39b97dc503a29 100644
--- a/testing/vcs/vtk_ui/vtk_ui_test.py
+++ b/testing/vcs/vtk_ui/vtk_ui_test.py
@@ -1,8 +1,4 @@
-import vtk
-import vcs.vtk_ui
-import os
-import sys
-import time
+import os, sys, time, vtk, vcs.vtk_ui
 
 
 def init():
@@ -101,15 +97,13 @@ class vtk_ui_test(object):
     def check_image(self, compare_against):
         """
         Checks the current render window's output against the image specified in the argument,
-        returns the result of checkimage.check_result_image
+        returns the result of regression.check_result_image
         """
         generate_png(self.win, self.test_file)
         pth = os.path.join(os.path.dirname(__file__), "../..")
         sys.path.append(pth)
-        import checkimage
-        print "fnm:", self.test_file
-        print "src:", compare_against
-        return checkimage.check_result_image(self.test_file, compare_against, checkimage.defaultThreshold)
+        import testing.regression as regression
+        return regression.check_result_image(self.test_file, compare_against)
 
     def test(self):
         self.do_test()
diff --git a/testing/vcsaddons/CMakeLists.txt b/testing/vcsaddons/CMakeLists.txt
index 64b8d1755bb8c837676ae2eacb26ae9380e081fb..d6b382fafb47e9d646854983cb97c95454fa116b 100644
--- a/testing/vcsaddons/CMakeLists.txt
+++ b/testing/vcsaddons/CMakeLists.txt
@@ -35,6 +35,55 @@ cdat_add_test(vcs_addons_test_EzTemplate_12_plots_spacing
   ${cdat_SOURCE_DIR}/testing/vcsaddons/test_EzTemplate_12_plots_spacing.py
   ${BASELINE_DIR}/test_EzTemplate_12_plots_spacing.png
 )
+cdat_add_test(vcs_addons_test_histogram_defaults
+  "${PYTHON_EXECUTABLE}"
+  ${cdat_SOURCE_DIR}/testing/vcsaddons/test_vcs_addons_histogram_defaults.py
+  ${BASELINE_DIR}/test_vcs_addons_histogram_defaults.png
+)
+cdat_add_test(vcs_addons_test_histogram_inherit
+  "${PYTHON_EXECUTABLE}"
+  ${cdat_SOURCE_DIR}/testing/vcsaddons/test_vcs_addons_histogram_inherit.py
+  ${BASELINE_DIR}/test_vcs_addons_histogram_inherit.png
+)
+cdat_add_test(vcs_addons_test_polar
+  "${PYTHON_EXECUTABLE}"
+  ${cdat_SOURCE_DIR}/testing/vcsaddons/test_vcs_addons_polar.py
+  ${BASELINE_DIR}/test_vcs_addons_polar.png
+)
+cdat_add_test(vcs_addons_test_polar_inherit
+  "${PYTHON_EXECUTABLE}"
+  ${cdat_SOURCE_DIR}/testing/vcsaddons/test_vcs_addons_polar_inherit.py
+  ${BASELINE_DIR}/test_vcs_addons_polar_inherit.png
+)
+cdat_add_test(vcs_addons_test_convert_arrays
+  "${PYTHON_EXECUTABLE}"
+  ${cdat_SOURCE_DIR}/testing/vcsaddons/test_vcs_addons_convert_arrays.py
+)
+cdat_add_test(vcs_addons_test_polar_degrees
+  "${PYTHON_EXECUTABLE}"
+  ${cdat_SOURCE_DIR}/testing/vcsaddons/test_vcs_addons_polar_degrees.py
+  ${BASELINE_DIR}/test_vcs_addons_polar_degrees.png
+)
+cdat_add_test(vcs_addons_test_polar_annual
+  "${PYTHON_EXECUTABLE}"
+  ${cdat_SOURCE_DIR}/testing/vcsaddons/test_vcs_addons_polar_annual.py
+  ${BASELINE_DIR}/test_vcs_addons_polar_annual.png
+)
+cdat_add_test(vcs_addons_test_polar_diurnal
+  "${PYTHON_EXECUTABLE}"
+  ${cdat_SOURCE_DIR}/testing/vcsaddons/test_vcs_addons_polar_diurnal.py
+  ${BASELINE_DIR}/test_vcs_addons_polar_diurnal.png
+)
+cdat_add_test(vcs_addons_test_polar_seasonal
+  "${PYTHON_EXECUTABLE}"
+  ${cdat_SOURCE_DIR}/testing/vcsaddons/test_vcs_addons_polar_seasonal.py
+  ${BASELINE_DIR}/test_vcs_addons_polar_seasonal.png
+)
+cdat_add_test(vcs_addons_test_polar_semidiurnal
+  "${PYTHON_EXECUTABLE}"
+  ${cdat_SOURCE_DIR}/testing/vcsaddons/test_vcs_addons_polar_semidiurnal.py
+  ${BASELINE_DIR}/test_vcs_addons_polar_semidiurnal.png
+)
 
 if (CDAT_DOWNLOAD_SAMPLE_DATA)
   cdat_add_test(vcs_addons_EzTemplate_2x2
diff --git a/testing/vcsaddons/test_12_plot_one_leg_per_row.py b/testing/vcsaddons/test_12_plot_one_leg_per_row.py
index b3cfa0fba6f9e79180f59e3500b90fa991c8fafb..9e8f25ab9f2581cf0f82918ce976174fae7f7d34 100644
--- a/testing/vcsaddons/test_12_plot_one_leg_per_row.py
+++ b/testing/vcsaddons/test_12_plot_one_leg_per_row.py
@@ -1,14 +1,8 @@
-
-import sys,os
-src = sys.argv[1]
-pth = os.path.join(os.path.dirname(__file__),"..")
-sys.path.append(pth)
-import checkimage
-import EzTemplate,vcs
-## 12 plot one legend per row
+import os, sys, vcs, testing.regression as regression
+from vcsaddons import EzTemplate
 
 ## Initialize VCS
-x=vcs.init()
+x = vcs.init()
 x.drawlogooff()
 
 bg = True
@@ -20,9 +14,7 @@ for i in range(12):
     t.legend.priority=0 # Turn off legend
 fnm = "test_12_plot_one_leg_per_row.png"
 M.preview(fnm,bg=bg)
-print "fnm:",fnm
-print "src:",src
-ret = checkimage.check_result_image(fnm,src,checkimage.defaultThreshold)
+ret = regression.check_result_image(fnm, sys.argv[1])
 if not bg:
     raw_input("Press Enter")
 sys.exit(ret)
diff --git a/testing/vcsaddons/test_12_plot_one_leg_per_row_right.py b/testing/vcsaddons/test_12_plot_one_leg_per_row_right.py
index 42e0f64e4c94915deaa6a69fa8cc958271c9d048..8daf50456f30b2f87fb4affbedbdc8440d14b7c9 100644
--- a/testing/vcsaddons/test_12_plot_one_leg_per_row_right.py
+++ b/testing/vcsaddons/test_12_plot_one_leg_per_row_right.py
@@ -1,17 +1,14 @@
+import os, sys, testing.regression as regression
 
-import sys,os
-src = sys.argv[1]
-pth = os.path.join(os.path.dirname(__file__),"..")
-sys.path.append(pth)
-import checkimage
-import EzTemplate,vcs
-import cdms,EzTemplate,vcs,sys
+import vcs
+from vcsaddons import EzTemplate
+import cdms
 ## 12 plots 1 legend per row on the right
 ## Initialize VCS
-x=vcs.init()
+x = vcs.init()
 x.drawlogooff()
-bg=True
-M=EzTemplate.Multi(rows=4,columns=3)
+bg = True
+M = EzTemplate.Multi(rows=4,columns=3)
 M.legend.direction='vertical'
 for i in range(12):
     t=M.get(legend='local')
@@ -19,9 +16,7 @@ for i in range(12):
         t.legend.priority=0 # Turn off legend
 fnm = "test_12_plot_one_leg_per_row_right.png"
 M.preview(fnm,bg=bg)
-print "fnm:",fnm
-print "src:",src
-ret = checkimage.check_result_image(fnm,src,checkimage.defaultThreshold)
+ret = regression.check_result_image(fnm, sys.argv[1])
 if not bg:
     raw_input("Press Enter")
 sys.exit(ret)
diff --git a/testing/vcsaddons/test_EzTemplate_12_plots_clean_func.py b/testing/vcsaddons/test_EzTemplate_12_plots_clean_func.py
index 6a9d50284ddcda7b0df2c9cff1a244651d76d3a1..141d94e09bc0968aff71e009f227fda793627fb1 100644
--- a/testing/vcsaddons/test_EzTemplate_12_plots_clean_func.py
+++ b/testing/vcsaddons/test_EzTemplate_12_plots_clean_func.py
@@ -1,5 +1,5 @@
 import vcs
-import EzTemplate
+from vcsaddons import EzTemplate
 
 M=EzTemplate.Multi(rows=2,columns=2)
 
diff --git a/testing/vcsaddons/test_EzTemplate_12_plots_legd_direction.py b/testing/vcsaddons/test_EzTemplate_12_plots_legd_direction.py
index b6ca9eb34f759c6b7d46afc48783ad12dd2e6907..e941fba1619cdd2998bec3eb38c26514862ce458 100644
--- a/testing/vcsaddons/test_EzTemplate_12_plots_legd_direction.py
+++ b/testing/vcsaddons/test_EzTemplate_12_plots_legd_direction.py
@@ -1,14 +1,8 @@
-
-import sys,os
-src = sys.argv[1]
-pth = os.path.join(os.path.dirname(__file__),"..")
-sys.path.append(pth)
-import checkimage
-import EzTemplate,vcs
-## 12 plot one legend per row
+import os, sys, vcs, testing.regression as regression
+from vcsaddons import EzTemplate
 
 ## Initialize VCS
-x=vcs.init()
+x = vcs.init()
 x.drawlogooff()
 
 bg = True
@@ -26,10 +20,8 @@ for i in range(12):
       t=M.get()
 
 fnm = "test_EzTemplate_12_plots_legd_direction.png"
-M.preview(fnm,bg=bg)
-print "fnm:",fnm
-print "src:",src
-ret = checkimage.check_result_image(fnm,src,checkimage.defaultThreshold)
+M.preview(fnm, bg=bg)
+ret = regression.check_result_image(fnm, sys.argv[1])
 if not bg:
     raw_input("Press Enter")
 sys.exit(ret)
diff --git a/testing/vcsaddons/test_EzTemplate_12_plots_margins_thickness.py b/testing/vcsaddons/test_EzTemplate_12_plots_margins_thickness.py
index 40899f264b62183fef0e16e3aa6eb475c54dabcc..60d35412731d08d8a867fc7d19acc567699a4d24 100644
--- a/testing/vcsaddons/test_EzTemplate_12_plots_margins_thickness.py
+++ b/testing/vcsaddons/test_EzTemplate_12_plots_margins_thickness.py
@@ -1,14 +1,8 @@
-
-import sys,os
-src = sys.argv[1]
-pth = os.path.join(os.path.dirname(__file__),"..")
-sys.path.append(pth)
-import checkimage
-import EzTemplate,vcs
-## 12 plot one legend per row
+import os, sys, vcs, testing.regression as regression
+from vcsaddons import EzTemplate
 
 ## Initialize VCS
-x=vcs.init()
+x = vcs.init()
 x.drawlogooff()
 
 bg = True
@@ -25,9 +19,7 @@ for i in range(12):
       t=M.get()
 fnm = "test_EzTemplate_12_plots_margins_thickness.png"
 M.preview(fnm,bg=bg)
-print "fnm:",fnm
-print "src:",src
-ret = checkimage.check_result_image(fnm,src,checkimage.defaultThreshold)
+ret = regression.check_result_image(fnm, sys.argv[1])
 if not bg:
     raw_input("Press Enter")
 sys.exit(ret)
diff --git a/testing/vcsaddons/test_EzTemplate_12_plots_mix_glb_local.py b/testing/vcsaddons/test_EzTemplate_12_plots_mix_glb_local.py
index 043e03de4981a2e71797ede26a02b74c704076e1..2e9be25214dcfd9770a7649e7e8137a0273b120c 100644
--- a/testing/vcsaddons/test_EzTemplate_12_plots_mix_glb_local.py
+++ b/testing/vcsaddons/test_EzTemplate_12_plots_mix_glb_local.py
@@ -1,12 +1,5 @@
-
-import sys,os
-src = sys.argv[1]
-pth = os.path.join(os.path.dirname(__file__),"..")
-sys.path.append(pth)
-import checkimage
-import EzTemplate,vcs
-## 12 plot one legend per row
-
+import os, sys, vcs, testing.regression as regression
+from vcsaddons import EzTemplate
 ## Initialize VCS
 x=vcs.init()
 x.drawlogooff()
@@ -24,9 +17,7 @@ for i in range(12):
 
 fnm = "test_EzTemplate_12_plots_mix_glb_local.png"
 M.preview(fnm,bg=bg)
-print "fnm:",fnm
-print "src:",src
-ret = checkimage.check_result_image(fnm,src,checkimage.defaultThreshold)
+ret = regression.check_result_image(fnm, sys.argv[1])
 if not bg:
     raw_input("Press Enter")
 sys.exit(ret)
diff --git a/testing/vcsaddons/test_EzTemplate_12_plots_spacing.py b/testing/vcsaddons/test_EzTemplate_12_plots_spacing.py
index d17eb1a691200fdf23eb4351029d26ae23ef88dc..3b60a8d07a024b8dd3f436e8f6e4acbd3a3a4100 100644
--- a/testing/vcsaddons/test_EzTemplate_12_plots_spacing.py
+++ b/testing/vcsaddons/test_EzTemplate_12_plots_spacing.py
@@ -1,14 +1,7 @@
-
-import sys,os
-src = sys.argv[1]
-pth = os.path.join(os.path.dirname(__file__),"..")
-sys.path.append(pth)
-import checkimage
-import EzTemplate,vcs
-## 12 plot one legend per row
-
+import os, sys, vcs, testing.regression as regression
+from vcsaddons import EzTemplate
 ## Initialize VCS
-x=vcs.init()
+x = vcs.init()
 x.drawlogooff()
 
 bg = True
@@ -18,9 +11,7 @@ M.spacing.vertical=.1
 
 fnm = "test_EzTemplate_12_plots_spacing.png"
 M.preview(fnm,bg=bg)
-print "fnm:",fnm
-print "src:",src
-ret = checkimage.check_result_image(fnm,src,checkimage.defaultThreshold)
+ret = regression.check_result_image(fnm, sys.argv[1])
 if not bg:
     raw_input("Press Enter")
 sys.exit(ret)
diff --git a/testing/vcsaddons/test_vcs_addons_EzTemplate_2x2.py b/testing/vcsaddons/test_vcs_addons_EzTemplate_2x2.py
index 2619fe5efc072c76327c1ecebbd6bc276cd330d4..ee645d16c0a51ee869daeef4a990ed64c1add380 100644
--- a/testing/vcsaddons/test_vcs_addons_EzTemplate_2x2.py
+++ b/testing/vcsaddons/test_vcs_addons_EzTemplate_2x2.py
@@ -1,21 +1,11 @@
+import os, sys, cdms2, testing.regression as regression, vcs, vcsaddons
 
-import sys,os
-src = sys.argv[1]
-pth = os.path.join(os.path.dirname(__file__),"..")
-sys.path.append(pth)
-import checkimage
-
-import vcs
-import vcsaddons
-import cdms2
-
-f=cdms2.open(os.path.join(vcs.sample_data,'clt.nc'))
-s=f("clt",time=slice(0,1),squeeze=1)
+f = cdms2.open(os.path.join(vcs.sample_data,'clt.nc'))
+s = f("clt",time=slice(0,1),squeeze=1)
 
 bg = True
-
-M=vcsaddons.EzTemplate.Multi(rows=2,columns=2)
-x=vcs.init()
+M = vcsaddons.EzTemplate.Multi(rows=2,columns=2)
+x = vcs.init()
 x.setantialiasing(0)
 x.drawlogooff()
 if bg:
@@ -25,9 +15,7 @@ for i in range(4):
 
 fnm = "test_vcs_addons_EzTemplate_2x2.png"
 x.png(fnm)
-print "fnm:",fnm
-print "src:",src
-ret = checkimage.check_result_image(fnm,src,checkimage.defaultThreshold)
+ret = regression.check_result_image(fnm, sys.argv[1])
 if not bg:
     raw_input("Press Enter")
 sys.exit(ret)
diff --git a/testing/vcsaddons/test_vcs_addons_convert_arrays.py b/testing/vcsaddons/test_vcs_addons_convert_arrays.py
new file mode 100644
index 0000000000000000000000000000000000000000..6e784e10b89cbc4569498f35f6d7ce73f10dffdc
--- /dev/null
+++ b/testing/vcsaddons/test_vcs_addons_convert_arrays.py
@@ -0,0 +1,72 @@
+import vcsaddons
+import numpy
+
+magnitudes = [1, 2, 3, 4]
+thetas = [5, 6, 7, 8]
+zipped_input = zip(magnitudes, thetas)
+grouped_zipped = [zipped_input[:2], zipped_input[2:]]
+
+one_array = numpy.array(zip(magnitudes, thetas))
+three_d_array = numpy.array(grouped_zipped)
+two_arrays = numpy.array(magnitudes), numpy.array(thetas)
+two_array_groups = numpy.array([magnitudes[:2], magnitudes[2:]]), numpy.array([thetas[:2], thetas[2:]])
+list_and_array = two_arrays[0], thetas
+two_lists = magnitudes, thetas
+lists_of_arrays = [two_arrays[0]], [two_arrays[1]]
+array_and_list = magnitudes, two_arrays[1]
+one_list_tuples = zip(magnitudes, thetas)
+one_list_grouped_tuples = [zip(magnitudes[:2], thetas[:2]), zip(magnitudes[2:], thetas[2:])]
+one_list_of_arrays = [numpy.array(zip(magnitudes[:2], thetas[:2])), numpy.array(zip(magnitudes[2:], thetas[2:]))]
+
+
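+# Each compare() below feeds one accepted input shape to convert_arrays,
+# which is expected to normalize every form into a pair of lists of groups:
+# ([magnitude_group, ...], [theta_group, ...]).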
+def compare(input, expected):
+    result = vcsaddons.polar.convert_arrays(*input)
+    print "Checking", result[0:2], "vs", expected
+    assert result[0] == expected[0]
+    assert result[1] == expected[1]
+
+grouped = ([magnitudes[:2], magnitudes[2:]], [thetas[:2], thetas[2:]])
+
+compare((one_array, None), ([magnitudes], [thetas]))
+compare(two_arrays, ([magnitudes], [thetas]))
+compare(two_array_groups, grouped)
+three_d_expected = ([[1, 2], [3, 4]], [[5, 6], [7, 8]])
+compare((three_d_array, None), three_d_expected)
+compare(array_and_list, ([magnitudes], [thetas]))
+compare(two_lists, ([magnitudes], [thetas]))
+compare(lists_of_arrays, ([magnitudes], [thetas]))
+compare(list_and_array, ([magnitudes], [thetas]))
+compare((one_list_tuples, None), ([[i] for i in magnitudes], [[i] for i in thetas]))
+compare((one_list_grouped_tuples, None), grouped)
+compare((one_list_of_arrays, None), grouped)
+
+
+def test_error(input, error):
+    try:
+        vcsaddons.polar.convert_arrays(*input)
+    except ValueError:
+        print "Got", error
+    else:
+        assert False, "Should have raised a %s" % error
+
+# Test error conditions
+
+# Single arg:
+
+# List of 3d arrays
+test_error(([numpy.array([[[1, 2]]])], None), "ValueError for list of 3d arrays")
+# >2 element arrays
+test_error(([numpy.array([[1, 2, 3]])], None), "ValueError for list of 3-element arrays")
+# <2 element arrays
+test_error(([numpy.array([[1]])], None), "ValueError for list of 1-element arrays")
+# Wrong-sized lists
+test_error(([[(1, 2, 3)]], None), "ValueError for wrong sized lists.")
+
+
+# Two args:
+
+# Too many dimensions
+test_error((numpy.array([[[1, 2]]]), numpy.array([[1, 2]])), "ValueError for too many dimensions for magnitude.")
+test_error((numpy.array([[1, 2]]), numpy.array([[[1, 2]]])), "ValueError for too many dimensions for theta.")
diff --git a/testing/vcsaddons/test_vcs_addons_histogram_defaults.py b/testing/vcsaddons/test_vcs_addons_histogram_defaults.py
new file mode 100644
index 0000000000000000000000000000000000000000..090aaf33d819d8d7ec68b6a34f90172f034298ad
--- /dev/null
+++ b/testing/vcsaddons/test_vcs_addons_histogram_defaults.py
@@ -0,0 +1,17 @@
+import sys,os
+src = sys.argv[1]
+import testing.regression as regression
+import vcs
+import vcsaddons, numpy
+
+x = regression.init()
+
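+# Fix the RNG seed so the sampled values, and hence the baseline image, are reproducible.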
+numpy.random.seed(seed=12345)
+vals = numpy.random.random_sample(2000) * 100
+histo = vcsaddons.histograms.Ghg()
+histo.plot(vals, bg=True, x=x)
+
+fnm = "test_vcs_addons_histogram_defaults.png"
+x.png(fnm)
+ret = regression.check_result_image(fnm, src)
+sys.exit(ret)
diff --git a/testing/vcsaddons/test_vcs_addons_histogram_inherit.py b/testing/vcsaddons/test_vcs_addons_histogram_inherit.py
new file mode 100644
index 0000000000000000000000000000000000000000..a977a68bbe4d81d865dcab8c9ac5262e250669c1
--- /dev/null
+++ b/testing/vcsaddons/test_vcs_addons_histogram_inherit.py
@@ -0,0 +1,58 @@
+import sys,os
+src = sys.argv[1]
+import testing.regression as regression
+import vcs, cdms2
+import vcsaddons, numpy
+
+x = regression.init()
+
+cdmsfile = cdms2.open(vcs.sample_data + "/clt.nc")
+clt = cdmsfile("clt")
+
+levels = [10, 20, 30, 40, 60, 70, 80, 90, 100]
+histo = vcsaddons.histograms.Ghg()
+histo.bins = levels
+histo.line = ["solid", "dash", "dash-dot"]
+histo.linewidth = [1, 2, 3]
+histo.linecolors = ["red", "green", "blue"]
+histo.fillareastyles = ["solid", "hatch", "pattern", "solid"]
+histo.fillareaindices = [1, 2, 3, 4]
+histo.fillareacolors = ["blue", "green", "red", "orange"]
+
+histo2 = vcsaddons.createhistogram(source=histo)
+
+print "Checking all inherited attributes..."
+assert histo2.bins == histo.bins
+assert histo2.line == histo.line
+assert histo2.linewidth == histo.linewidth
+assert histo2.linecolors == histo.linecolors
+assert histo2.fillareastyles == histo.fillareastyles
+assert histo2.fillareacolors == histo.fillareacolors
+assert histo2.fillareaindices == histo.fillareaindices
+print "Inherited all values."
+
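+# Creating from the source's name (rather than the object) should inherit the same attributes.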
+histo2.levels = [10, 20, 10, 100, 110, 50, 20]
+histo3 = vcsaddons.createhistogram(source=histo2.name, x=x)
+
+print "Checking name-based inheritance"
+assert histo3.bins == histo2.bins
+assert histo3.line == histo2.line
+assert histo3.linewidth == histo2.linewidth
+assert histo3.linecolors == histo2.linecolors
+assert histo3.fillareastyles == histo2.fillareastyles
+assert histo3.fillareacolors == histo2.fillareacolors
+assert histo3.fillareaindices == histo2.fillareaindices
+print "Inherited all values."
+
+histo3.datawc_y1 = -1
+histo3.datawc_y2 = 200000
+histo3.datawc_x1 = 0
+histo3.datawc_x2 = 100
+
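+# Clearing bins should let plot() fall back to its default binning for the data.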
+histo3.bins = None
+histo3.plot(clt, template="default", bg=True)
+
+fnm = "test_vcs_addons_histogram_inherit.png"
+x.png(fnm)
+ret = regression.check_result_image(fnm, src)
+sys.exit(ret)
diff --git a/testing/vcsaddons/test_vcs_addons_polar.py b/testing/vcsaddons/test_vcs_addons_polar.py
new file mode 100644
index 0000000000000000000000000000000000000000..5fcdc6c952e7b79fa86e739e16d3d7e645df92b2
--- /dev/null
+++ b/testing/vcsaddons/test_vcs_addons_polar.py
@@ -0,0 +1,23 @@
+import sys,os
+src = sys.argv[1]
+import testing.regression as regression
+import vcs
+import vcsaddons, numpy
+
+x = regression.init()
+
+polar = vcsaddons.polar.Gpo()
+polar.markers = ["dot", "circle"]
+polar.markersizes = [3, 5]
+
+polar.magnitude_tick_angle = numpy.pi / 6
+
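+# Sample sin(theta) over two full revolutions (0 to 4*pi) at pi/24 (7.5 degree) steps.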
+theta = list(numpy.arange(0, 4 * numpy.pi + .01, numpy.pi / 24))
+magnitude = list(numpy.sin(theta))
+
+polar.plot(magnitude, theta, bg=True, x=x)
+
+fnm = "test_vcs_addons_polar.png"
+x.png(fnm)
+ret = regression.check_result_image(fnm, src)
+sys.exit(ret)
diff --git a/testing/vcsaddons/test_vcs_addons_polar_annual.py b/testing/vcsaddons/test_vcs_addons_polar_annual.py
new file mode 100644
index 0000000000000000000000000000000000000000..5cea2bfc10a793443181ee0d02277b0b8439f949
--- /dev/null
+++ b/testing/vcsaddons/test_vcs_addons_polar_annual.py
@@ -0,0 +1,36 @@
+import sys,os
+import testing.regression as regression
+import vcs
+import vcsaddons, numpy
+
+src = sys.argv[1]
+
+x = regression.init()
+
+polar = vcsaddons.getpolar("annual_cycle")
+polar.markers = ["dot"]
+polar.markersizes = [3]
+
+polar.magnitude_tick_angle = numpy.pi / 8
+
+import cdms2, cdutil
+
+f = cdms2.open(os.path.join(vcs.sample_data, "clt.nc"))
+clt = f("clt")
+cdutil.setAxisTimeBoundsMonthly(clt.getTime())
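+# Average over the whole record for the time-mean field, replicate that mean
+# along the time axis, and subtract to form departures; then reduce each
+# timestep to a global mean for the polar plot.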
+averaged_time = cdutil.averager(clt, axis="t")
+averaged_time = averaged_time.reshape((1, averaged_time.shape[0], averaged_time.shape[1]))
+averaged_time_for_departures = numpy.repeat(averaged_time, len(clt), axis=0)
+
+clt_departures = clt - averaged_time_for_departures
+clt_departures.setAxisList(clt.getAxisList())
+avg_departures = cdutil.averager(clt_departures, axis="xy")
+
+theta = range(1, len(clt) + 1)
+magnitude = avg_departures
+polar.plot(magnitude, theta, bg=True, x=x)
+
+fnm = "test_vcs_addons_polar_annual.png"
+x.png(fnm)
+ret = regression.check_result_image(fnm, src)
+sys.exit(ret)
diff --git a/testing/vcsaddons/test_vcs_addons_polar_degrees.py b/testing/vcsaddons/test_vcs_addons_polar_degrees.py
new file mode 100644
index 0000000000000000000000000000000000000000..1a44b6b0e6e58ffb52a249286c812bb8def34748
--- /dev/null
+++ b/testing/vcsaddons/test_vcs_addons_polar_degrees.py
@@ -0,0 +1,24 @@
+import sys
+src = sys.argv[1]
+import testing.regression as regression
+import vcs
+import vcsaddons, numpy
+
+x = regression.init()
+
+polar = vcsaddons.getpolar("degrees")
+polar.markers = ["dot", "circle"]
+polar.markersizes = [3, 5]
+
+polar.magnitude_tick_angle = numpy.pi / 6
+
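+# theta sweeps two revolutions in degrees; the magnitude completes five sine cycles per revolution.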
+theta = numpy.array(range(0, 720, 2))
+magnitude = 9 * numpy.sin(5 * 2 * numpy.pi * theta / 360)
+polar.datawc_y1 = 0
+polar.datawc_y2 = max(magnitude)
+polar.plot(magnitude, theta, bg=True, x=x)
+
+fnm = "test_vcs_addons_polar_degrees.png"
+x.png(fnm)
+ret = regression.check_result_image(fnm, src)
+sys.exit(ret)
diff --git a/testing/vcsaddons/test_vcs_addons_polar_diurnal.py b/testing/vcsaddons/test_vcs_addons_polar_diurnal.py
new file mode 100644
index 0000000000000000000000000000000000000000..24a6f832bcd736f19c2780d70beaded1e32862a2
--- /dev/null
+++ b/testing/vcsaddons/test_vcs_addons_polar_diurnal.py
@@ -0,0 +1,39 @@
+import sys,os
+src = sys.argv[1]
+import vcs
+import vcsaddons, numpy
+import cdms2, cdutil, cdtime
+import testing.regression as regression
+
+x = regression.init()
+
+f = cdms2.open(os.path.join(vcs.sample_data, "thermo.nc"))
+temp = f('t')
+levels = temp.getLevel()
+time = temp.getTime()
+# Break up temp by level
+magnitudes = [temp[:,i] for i in range(temp.shape[1])]
+for i, mag in enumerate(magnitudes):
+    mag.id = "%0.f %s" % (levels[i], levels.units)
+
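+# Use hour-of-day as the angular coordinate, the natural axis for a diurnal plot.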
+times = []
+for t in time:
+    reltime = cdtime.relativetime(t, time.units)
+    comptime = reltime.tocomponent()
+    times.append(comptime.hour)
+
+thetas = [times] * len(magnitudes)
+
+polar = vcsaddons.getpolar("diurnal")
+polar.markers = ["dot"]
+polar.markersizes = [3]
+polar.markercolors = vcs.getcolors(list(levels))
+
+polar.magnitude_tick_angle = numpy.pi / 8
+
+polar.plot(magnitudes, thetas, bg=True, x=x)
+
+fnm = "test_vcs_addons_polar_diurnal.png"
+x.png(fnm)
+ret = regression.check_result_image(fnm, src)
+sys.exit(ret)
diff --git a/testing/vcsaddons/test_vcs_addons_polar_inherit.py b/testing/vcsaddons/test_vcs_addons_polar_inherit.py
new file mode 100644
index 0000000000000000000000000000000000000000..2eb10b7d8d5deab64aedc04ed479889d23d2d26d
--- /dev/null
+++ b/testing/vcsaddons/test_vcs_addons_polar_inherit.py
@@ -0,0 +1,45 @@
+import sys,os
+src = sys.argv[1]
+import testing.regression as regression
+import vcs
+import vcsaddons, numpy
+
+x = regression.init()
+
+gm = vcsaddons.polar.Gpo()
+gm.markers = ["dot", "circle"]
+gm.markersizes = [3, 5]
+gm.markercolors = ["red", "blue"]
+gm.clockwise = True
+gm.theta_offset = numpy.pi / 4
+gm.magnitude_ticks = [.2 * i for i in range(6)]
+gm.magnitude_tick_angle = numpy.pi / 10
+gm.theta_tick_count = 10
+gm.group_names = ["First", "Second"]
+
+polar = vcsaddons.polar.Gpo(source=gm)
+
+assert polar.markersizes == gm.markersizes
+assert polar.markercolors == gm.markercolors
+assert polar.markers == gm.markers
+assert polar.clockwise == gm.clockwise
+assert polar.theta_offset == gm.theta_offset
+assert polar.magnitude_ticks == gm.magnitude_ticks
+assert polar.magnitude_tick_angle == gm.magnitude_tick_angle
+assert polar.theta_tick_count == gm.theta_tick_count
+assert polar.group_names == gm.group_names
+
+polar.magnitude_tick_angle = numpy.pi / 6
+
+theta = list(numpy.arange(0, 4 * numpy.pi + .01, numpy.pi / 24))
+magnitude = list(numpy.sin(theta))
+
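+# Split the series in half to exercise both marker/color groups configured on gm.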
+theta = [theta[:len(theta) / 2], theta[len(theta) / 2:]]
+magnitude = [magnitude[:len(magnitude)/ 2], magnitude[len(magnitude) / 2:]]
+
+polar.plot(magnitude, theta, bg=True, x=x)
+
+fnm = "test_vcs_addons_polar_inherit.png"
+x.png(fnm)
+ret = regression.check_result_image(fnm, src)
+sys.exit(ret)
diff --git a/testing/vcsaddons/test_vcs_addons_polar_seasonal.py b/testing/vcsaddons/test_vcs_addons_polar_seasonal.py
new file mode 100644
index 0000000000000000000000000000000000000000..0f5693a35f35d0145cfd5b658eedf1901a441290
--- /dev/null
+++ b/testing/vcsaddons/test_vcs_addons_polar_seasonal.py
@@ -0,0 +1,53 @@
+import sys,os
+src = sys.argv[1]
+import testing.regression as regression
+import vcs
+import vcsaddons, numpy, MV2
+import cdms2, cdutil, cdtime
+
+x = regression.init()
+
+f = cdms2.open(os.path.join(vcs.sample_data, "clt.nc"))
+# Trim first few months and last month so we have even number of seasons
+cloudiness = f('clt', time=(11, 119))
+cdutil.setAxisTimeBoundsMonthly(cloudiness.getTime())
+cloudiness_time_axis = cloudiness.getTime()
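+# 108 monthly timesteps collapse into 36 three-month seasonal means on the 46x72 grid.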
+averaged_seasons = MV2.zeros((36, 46, 72))
+# Average the seasons in cloudiness
+for i in range(36):
+    averaged_seasons[i] = cdutil.averager(cloudiness(time=(cloudiness_time_axis[i * 3], cloudiness_time_axis[(i+1) * 3])), axis="t")
+
+averaged_seasons.setAxis(1, cloudiness.getLatitude())
+averaged_seasons.setAxis(2, cloudiness.getLongitude())
+
+regions = {
+    "north_polar": (66, 90),
+    "north_temperate": (22, 66),
+    "tropics": (-22, 22),
+    "south_temperate": (-66, -22),
+    "south_polar": (-90, -66)
+}
+
+def get_region_avg(var, r, axis="xy"):
+    avg = cdutil.averager(var(latitude=regions[r]), axis=axis)
+    avg.id = r
+    return avg
+
+# Iterate regions in a fixed south-to-north order so each series pairs
+# deterministically with the markercolors below (dict order is arbitrary).
+region_order = ["south_polar", "south_temperate", "tropics", "north_temperate", "north_polar"]
+magnitudes = [get_region_avg(averaged_seasons, region) for region in region_order]
+thetas = [range(4) * 9] * 5  # 36 seasonal means = 9 years x 4 seasons, one theta series per region
+
+polar = vcsaddons.getpolar("seasonal")
+polar.datawc_y1 = 0
+polar.datawc_y2 = 100
+polar.markers = ["dot"]
+polar.markersizes = [3]
+polar.markercolors = vcs.getcolors([-90, -66, -22, 22, 66, 90], split=False)
+
+polar.magnitude_tick_angle = numpy.pi / 4
+
+polar.plot(magnitudes, thetas, bg=True, x=x)
+
+fnm = "test_vcs_addons_polar_seasonal.png"
+x.png(fnm)
+ret = regression.check_result_image(fnm, src)
+sys.exit(ret)
diff --git a/testing/vcsaddons/test_vcs_addons_polar_semidiurnal.py b/testing/vcsaddons/test_vcs_addons_polar_semidiurnal.py
new file mode 100644
index 0000000000000000000000000000000000000000..1ce3f21efa5230711135e07cad0a3201414c0691
--- /dev/null
+++ b/testing/vcsaddons/test_vcs_addons_polar_semidiurnal.py
@@ -0,0 +1,39 @@
+import sys,os
+src = sys.argv[1]
+import testing.regression as regression
+import vcs
+import vcsaddons, numpy
+import cdms2, cdutil, cdtime
+
+x = regression.init()
+
+f = cdms2.open(os.path.join(vcs.sample_data, "thermo.nc"))
+temp = f('t')
+levels = temp.getLevel()
+time = temp.getTime()
+# Break up temp by level
+magnitudes = [temp[:,i] for i in range(temp.shape[1])]
+for i, mag in enumerate(magnitudes):
+    mag.id = "%0.f %s" % (levels[i], levels.units)
+
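+# Fold the hours onto a 12-hour clock so both daily cycles overlay (the semidiurnal view).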
+times = []
+for t in time:
+    reltime = cdtime.relativetime(t, time.units)
+    comptime = reltime.tocomponent()
+    times.append(comptime.hour % 12)
+
+thetas = [times] * len(magnitudes)
+
+polar = vcsaddons.getpolar("semidiurnal")
+polar.markers = ["dot"]
+polar.markersizes = [3]
+polar.markercolors = vcs.getcolors(list(levels))
+
+polar.magnitude_tick_angle = numpy.pi / 8
+
+polar.plot(magnitudes, thetas, bg=True, x=x)
+
+fnm = "test_vcs_addons_polar_semidiurnal.png"
+x.png(fnm)
+ret = regression.check_result_image(fnm, src)
+sys.exit(ret)
diff --git a/testing/vcsaddons/test_vcsaddons_preview_2x2.py b/testing/vcsaddons/test_vcsaddons_preview_2x2.py
index 754aa5ceaef64cc983996d3eb602819e36ae3361..a0318f25a9ea6268cd5b3f8573e0548ec22bb0ed 100644
--- a/testing/vcsaddons/test_vcsaddons_preview_2x2.py
+++ b/testing/vcsaddons/test_vcsaddons_preview_2x2.py
@@ -1,22 +1,12 @@
-
-import sys,os
-src = sys.argv[1]
-pth = os.path.join(os.path.dirname(__file__),"..")
-sys.path.append(pth)
-import checkimage
-
-import vcsaddons
+import os, sys, testing.regression as regression, vcsaddons
 
 bg = True
-
-M=vcsaddons.EzTemplate.Multi(rows=2,columns=2)
+M = vcsaddons.EzTemplate.Multi(rows=2,columns=2)
 if bg:
   M.x.setbgoutputdimensions(1200,1091,units="pixels")
 fnm = "test_vcsaddons_preview_2x2.png"
 M.preview(out=fnm,bg=bg)
-print "fnm:",fnm
-print "src:",src
-ret = checkimage.check_result_image(fnm,src,checkimage.defaultThreshold)
+ret = regression.check_result_image(fnm, sys.argv[1])
 if not bg:
     raw_input("Press Enter")
 sys.exit(ret)
diff --git a/testing/xmgrace/CMakeLists.txt b/testing/xmgrace/CMakeLists.txt
index e1de5fd9117633247396a2059776e7259da90d90..470aa056d087a569c2b9636634431022e371a69b 100644
--- a/testing/xmgrace/CMakeLists.txt
+++ b/testing/xmgrace/CMakeLists.txt
@@ -1,5 +1,5 @@
-add_test(flake8_xmgrace
-  "${FLAKE8_EXECUTABLE}" "${cdat_SOURCE_DIR}/Packages/xmgrace/Lib/"
+cdat_add_test(flake8_xmgrace
+  flake8 "${cdat_SOURCE_DIR}/Packages/xmgrace/Lib/"
   --show-source # Show context for detected errors
   --statistics  # Show summary of errors at end of output
   --max-line-length=128 # Max line 128 not 80
diff --git a/tests/cdat/test_cdat.py b/tests/cdat/test_cdat.py
deleted file mode 100644
index 3c87d0c0bf5677739cb4fbef94490e1ea11d9b41..0000000000000000000000000000000000000000
--- a/tests/cdat/test_cdat.py
+++ /dev/null
@@ -1,500 +0,0 @@
-#!/usr/bin/env python
-version='%prog 1.0'
-usage = "usage: %prog [options] PACKAGE1, PACKAGE2, CONTRIB1, CONTRIB2, ..."
-import subprocess,os,sys
-import optparse
-import time
-import bz2,ftplib
-ftp_site = "climate.llnl.gov"
-ftp_dir = "Shadow"
-ftp_user = "cdat"
-ftp_password = "Changeme1"
-
-import cdat_info
-default_time_format = "%Y-%m-%d %H:%M:%S"
-
-def get_shadow_name(test_dir,test):
-    fnm = os.path.join(test_dir,test)[:-3]+'.shadow.bz2'
-    path = list(os.path.split(fnm))
-    while path[0]!='':
-        tmp = os.path.split(path.pop(0))
-        path.insert(0,tmp[1])
-        path.insert(0,tmp[0])
-    fnm2 = '.'.join(path[1:])
-    return fnm,fnm2
-
-def get_shadow_ftp(test_dir,test):
-    fnm,ftpnm = get_shadow_name(test_dir,test)
-    f = open(fnm,"w")
-    try:
-        ftp=ftplib.FTP(ftp_site)
-        ftp.login(ftp_user,ftp_password)
-        ftp.cwd(ftp_dir)
-        ftp.retrbinary('RETR %s' % ftpnm, f.write)
-        ftp.close()
-        f.close()
-        f = open(fnm)
-        s=f.read()
-        f.close()
-        s = bz2.decompress(s)
-        f = open(fnm[:-4],"w") # open w/o bz2 ext
-        f.write(s)
-        f.close()
-        os.remove(fnm)
-    except Exception,err:
-        f.close()
-        os.remove(fnm)
-        pass
-    
-
-def get_shadow_local(test_dir,test):
-    fnm = os.path.join(test_dir,test)[:-3]+'.shadow'
-    if os.path.exists(fnm):
-        f=open(fnm,"r")
-        s=f.read()
-        f.close()
-        shadow_dict=eval(s)
-    else:
-        shadow_dict={}
-    return shadow_dict
-
-def get_shadow(test_dir,test):
-    # first try from ftp
-    get_shadow_ftp(test_dir,test)
-    return get_shadow_local(test_dir,test)
-
-def set_shadow_local(test_dir,test,dict):
-    try:
-        fnm = os.path.join(test_dir,test)[:-3]+'.shadow'
-        os.remove(fnm)
-    except:
-        pass
-    try:
-        fnm = os.path.join(test_dir,test)[:-3]+'.shadow.bz2'
-        f=open(fnm,"w")
-        s= bz2.compress(repr(dict))
-        print >> f, s
-        f.close()
-    except Exception,err:
-        pass
-    return
-
-def put_shadow_ftp(test_dir,test):
-    fnm,ftpnm = get_shadow_name(test_dir,test)
-    try:
-        ftp=ftplib.FTP(ftp_site)
-        ftp.login(ftp_user,ftp_password)
-        ftp.cwd(ftp_dir)
-        f=open(fnm)
-        ftp.storbinary('STOR %s' % ftpnm, f)
-        ftp.close()
-        os.remove(fnm)
-    except Exception,err:
-        print 'Error putting ftp bz2',err
-        pass
-        
-def set_shadow(test_dir,test,dict):
-    set_shadow_local(test_dir,test,dict)
-    if int(o.upload)>0:
-        put_shadow_ftp(test_dir,test)
-    return
-
-def make_tests_string_machine(machine,dict):
-    details=""
-    details = "\t\t\tlast successful run: %s" % dict.get("last","never")
-    if dict.has_key("time"):
-        details+="\n\t\t\tduration (min,avg,max) %i, %i, %i seconds" % (dict["fastest"],dict["time"],dict["slowest"])
-    if dict.has_key("count") and o.verbose>1:
-        details+='\n\t\t\tSuccesfully tested %i times on at least : %i independent machines' % (dict["count"],len(dict["machines"]))
-    return details
-
-def make_tests_string(dict_all):
-    details=""
-    for os in dict_all.keys():
-        details += "\n\t\tOS: %s" % os
-        dict_os = dict_all[os]
-        for v in dict_os.keys():
-            details += "\n\t\t  Version: %s" % v
-            dict_system = dict_os[v]
-            for m in dict_system.keys():
-                details += "\n\t\t   Machine: %s" % m
-                dict=dict_system[m]
-                details+='\n'+make_tests_string_machine(m,dict)
-    return details
-
-def run_dir(test_dir,lst):
-    lst.sort()
-    passed=True
-    output={}
-    for test in lst:
-        if test[-3:]=='.py' and (test.lower()[:4]=='test' or test.lower()[:6]=='cdtest'):
-            Dict_all = get_shadow(test_dir,test)
-            if o.query_mode:
-                output[(test_dir,test)]=Dict_all
-                try:
-                    fnm = os.path.join(test_dir,test)[:-3]+'.shadow'
-                    os.remove(fnm)
-                except:
-                    pass
-                continue
-            myversion = ".".join(map(str,cdat_info.version()))
-            dict_all = Dict_all.get(myversion,{})
-            myos = os.uname()[0]
-            system = os.uname()[2]
-            machine = os.uname()[4]
-            dict_os = dict_all.get(myos,{})
-            dict_system = dict_os.get(system,{})
-            dict = dict_system.get(machine,{})
-            dict_system[machine] = dict
-            dict_os[system] = dict_system
-            dict_all[myos] = dict_os
-            details = ""
-            last = dict.get("last","1980-01-01 00:00:00") # ok ever ago!
-            format = dict.get("format",default_time_format)
-            tlast = time.strptime(last,format)
-            delta = time.mktime(tlast)-time.mktime(time.strptime(o.date,o.format))
-            if delta>0:
-                if o.verbose>0:
-                    print "\tRunning: %s" % (test)
-                    print "\t\tSuccessful run newer than threshold %s vs %s " % (last,o.date)
-                continue
-            if o.verbose>0:
-                print "\tRunning: %s" % (test)
-                if o.verbose<3 or dict_all.keys()==[]:
-                    details=make_tests_string_machine(machine,dict)
-                else:
-                    details+=make_tests_string(dict_all)
-                print details
-            t = time.time()
-            out,err= run_test(os.path.join(test_dir,test))
-            err2 = []
-            for l in err:
-                if l.find("Warning")>-1:
-                    pass
-                else:
-                    err2.append(l)
-            err=err2
-            t2 = time.time()
-            if err!=[]:
-                passed = False
-            if o.verbose>1:
-                for l in out:
-                    st='\t\t%s' % l.strip()
-                    print st
-            if o.verbose>0:
-                if err!=[]:
-                    print '\t        FAILED\n\n',err
-                    if o.verbose>1:
-                        for l in err:
-                            st='\t\t%s' % l.strip()
-                            print st
-                else:
-                    print '\t        PASSED\n\n'
-                    runtime = int(t2-t)+1
-                    fastest = dict.get("fastest",runtime+1)
-                    if fastest>runtime:
-                        fastest = runtime
-                    dict["fastest"]=fastest
-                    slowest = dict.get("slowest",runtime-1)
-                    if slowest<runtime:
-                        slowest = runtime
-                    dict["slowest"]=slowest
-                    dict["format"]=default_time_format
-                    dict["last"] = time.strftime(default_time_format,time.localtime())
-                    count=dict.get("count",0)
-                    count+=1
-                    dict["count"]=count
-                    avg = dict.get("time",0.)*(count-1)
-                    avg+=runtime
-                    avg/=count
-                    dict["time"] = avg
-                    machines = dict.get("machines",[])
-                    if int(o.upload)>1:
-                        mymachine = os.uname()[1]
-                    else:
-                        mymachine = "private"
-                    if not mymachine in machines:
-                        machines.append(mymachine)
-                        dict["machines"] = machines
-                        
-                    dict_system[machine] = dict
-                    dict_os[system] = dict_system
-                    dict_all[myos] = dict_os
-                    Dict_all[myversion] = dict_all
-                    output[(test_dir,test)]=dict
-                    if out==[] or str(out[-1]).lower().find('skipped')==-1:
-                        # ok the test havent been skipped
-                        # we can replace stat file
-                        set_shadow(test_dir,test,Dict_all)
-                    
-            if o.skip is False and passed is False:
-                sys.exit()
-    return output
-
-def run_test(test):
-    wd, test = os.path.split(test)
-    cmd = 'cd %s ; %s %s' % (wd, sys.executable, test)
-    if o.full_testing:
-        cmd+=' --full --extended'
-    if o.extended_testing:
-        cmd += ' --extended'
-    #print cmd
-    P=subprocess.Popen(cmd,stdin=subprocess.PIPE, stdout=subprocess.PIPE, stderr=subprocess.PIPE, close_fds=True,shell=True)
-    
-    out = P.stdout.readlines()
-    err = P.stderr.readlines()
-    rmv =[]
-    for l in err:
-        for e in o.error_strings:
-            if l.find(e)>-1:
-                rmv.append(l)
-                break
-    for l in rmv:
-        err.remove(l)
-        
-    return out,err
-
-format = default_time_format
-date = time.strftime(format,time.localtime()) # Now!
-
-p=optparse.OptionParser(version=version,usage=usage)
-
-time_format_help_string = """format for time, default: %default                             
-Format can be constructed from the following keys:                        
-%a 	Locale's abbreviated weekday name.               
-%A 	Locale's full weekday name. 	              
-%b 	Locale's abbreviated month name.               
-%B 	Locale's full month name. 	                 
-%c 	Locale's appropriate date and time representation.                                      
-%d 	Day of the month as a decimal number [01,31]. 	
-%H 	Hour (24-hour clock) as a decimal number [00,23].                                            	
-%I 	Hour (12-hour clock) as a decimal number [01,12].                                                      	
-%j 	Day of the year as a decimal number [001,366]. 	                                             
-%m 	Month as a decimal number [01,12]. 	                 
-%M 	Minute as a decimal number [00,59]. 	                 
-%p 	Locale's equivalent of either AM or PM.        
-%S 	Second as a decimal number [00,61]. 	          
-%U 	Week number of the year (Sunday as the first day of the week) as a decimal number [00,53]. All days in a new year preceding the first Sunday are considered to be in week 0. 	                      
-%w 	Weekday as a decimal number [0(Sunday),6].                             	
-%W 	Week number of the year (Monday as the first day of the week) as a decimal number [00,53]. All days in a new year preceding the first Monday are considered to be in week 0.                                           
-%x 	Locale's appropriate date representation. 	         
-%X 	Locale's appropriate time representation. 	            
-%y 	Year without century as a decimal number [00,99].                        	              
-%Y 	Year with century as a decimal number. 	              
-%Z 	Time zone name (no characters if no time zone exists).                                               	
-%% 	A literal "%" character.                   
-"""
-
-## Adds options to test utility
-p.add_option("-a","--all","-A","--ALL",dest="all",help="Run test for ALL Packages and contributed Packages",action="store_true",default=False)
-p.add_option("-P","--packages",dest="all_packages",help="Run test on all packages",action="store_true",default=False)
-p.add_option("-C","--contribs",dest="all_contrib",help="Run test on all contributed packages",action="store_true",default=False)
-p.add_option("-p","--package",dest="Packages",metavar="PACKAGE",help="Run test on this package",action="append",type="string",default=[])
-p.add_option("-c","--contrib","--contributed",dest="Contribs",metavar="CONTRIBUTED",help="Run test on this contributed package",action="append",type="string",default=[])
-p.add_option("-s","--stop","--noerror",dest="skip",help="Stop on errors (default: %default)",action="store_false",default=False)
-p.add_option("-S","--nostop","--skip",dest="skip",help="Do not stop on errors",action="store_true",default=False)
-p.add_option("-v","--verbose",metavar="LEVEL",dest="verbose",help="Level of verbosity (0, 1, 2 or 3), default is %default",type="choice",default="1",choices=("0","1","2","3"))
-p.add_option("-u","--upload",metavar="LEVEL",dest="upload",help="Level of upload privacy (0, 1, or 2), 0 no data uploaded, 1 no private data uploaded, 2 uploads hostname, default is %default",type="choice",default="2",choices=("0","1","2"))
-p.add_option("-e","--okerror",metavar="ERROR STRING",dest="error_strings",help="Identify 'none' error merror messages (removes lines in error messages containing this)",default=["ppmtogif","pnmcolormap","pnmremap","ppmtogif","ppmquant","pnmcrop","Definition of","DeprecationWarning","self.nxo"],action="append",type="string")
-p.add_option("-d","--date",dest="date",type="string",help="Will run a test if last successfull time is older than 'date', default is now: %default                                      See --timeformat option for date format",default=date)
-p.add_option("-f","--timeformat",dest="format",type="string",help=time_format_help_string,default=default_time_format)
-p.add_option("-q","--query_mode",dest="query_mode",help="Runs a query of successfully run test only, does not execute anything",action="store_true",default=False)
-p.add_option("-F","--full",dest="full_testing",help="Full testing (more detailed testing) default is %default",default=False,action="store_true")
-
-
-# short test is default -jd082007
-p.add_option("-E","--extended",dest="extended_testing",help="Extended testing (runs testing completely) default is %default",default=False,action="store_true")
-
-
-(o,args) = p.parse_args()
-
-if int(o.upload)==2 and o.query_mode is False:
-    print 'Your upload level is set to 2\nThis means CDAT will recover your machine\'s name (only when running the test suite).\nTo turn this off use option: --upload=1 (no private data uploaded) or 0 (no data uploaded at all)'
-    print "Your machine's name (%s) will be stored for statistical purposes only" % os.uname()[1]
-    cont = raw_input("Do you wish to continue? (y/n) [y]")
-    if not cont.lower() in ['','y','yes']:
-        sys.exit()
-try:
-    time.strptime(o.date,o.format)
-except:
-    p.error('date must be in format: "%s", or specify format on command line (use --help)' % o.format)
-# Handles case where nothing is passed!
-if not (o.all_packages or o.all_contrib or o.all) and o.Packages==[] and o.Contribs==[] and args==[]:
-    (o,args) = p.parse_args(["-h"])
-
-if o.all:
-    o.all_packages=True
-    o.all_contrib=True
-
-# Append all the Packages
-packages=[]
-pckgs = os.listdir("Packages")
-pckgs.sort()
-for pk in pckgs:
-    if pk in ['cmor','cdms','regrid','Properties']:
-        continue
-    if os.path.isdir(os.path.join("Packages",pk)):
-        lst=[]
-        try:
-            dr = os.path.join("Packages",pk,"Test")
-            lst = os.listdir(os.path.join("Packages",pk,"Test"))
-        except:
-            pass
-        try:
-            lst = os.listdir(os.path.join("Packages",pk,"test"))
-        except:
-            pass
-        if lst!=[]:
-            packages.append(pk)
-            
-if o.all_packages:
-    for pk in packages:
-        if not pk in o.Packages:
-            o.Packages.append(pk)
-            
-contribs=o.Contribs
-if contribs==[]:
-    pckgs = os.listdir("contrib")
-    pckgs.sort()
-    for pk in pckgs:
-        if pk in ['spanlib']:
-            try:
-                import spanlib
-            except:
-                continue
-        if os.path.isdir(os.path.join("contrib",pk)):
-            lst=[]
-            try:
-                lst = os.listdir(os.path.join("contrib",pk,"Test"))
-            except:
-                pass
-            try:
-                lst = os.listdir(os.path.join("contrib",pk,"test"))
-            except:
-                pass
-            if lst!=[] and pk not in o.Contribs:
-                # first try to see if contrib has been built
-                contribs.append(pk)
-            
-if o.all_contrib:
-    for pk in contribs:
-        if pk not in o.Contribs:
-            o.Contribs.append(pk)
-
-#Now adds the extra arguments
-for pk in args:
-    ok=False
-    if pk in packages:
-        ok = True
-        if not pk in o.Packages:
-            o.Packages.append(pk)
-    if pk in contribs:
-        ok = True
-        if not pk in o.Contribs:
-            o.Contribs.append(pk)
-    if not ok:
-        if o.skip:
-            print 'Will skip Package:',pk
-        else:
-            print "Package %s does not exists or has not test suite" % pk
-            print 'type "%s --help" for help and usage' % sys.argv[0]
-            sys.exit()
-            
-        
-# Ok now runs the test to see if packages are good
-skipped=[]
-for pk in o.Packages:
-    if not pk in packages:
-        if o.skip:
-            print 'Will skip Package:',pk
-            skipped.append(pk)
-        else:
-            print "Package %s does not exists or has no test suite" % pk
-            print 'type "%s --help" for help and usage' % sys.argv[0]
-            sys.exit()
-for pk in skipped:
-    o.Packages.remove(pk)
-# Ok now runs the test to see if contribs are good
-skipped=[]
-for pk in o.Contribs:
-    if not pk in contribs:
-        if o.skip:
-            print 'Will skip Contributed Package:',pk
-            skipped.append(pk)            
-        else:
-            print "Contributed Package %s does not exists or has not test suite" % pk
-            print 'type "%s --help" for help and usage' % sys.argv[0]
-            print 'valid contributed packages: %s' % ' '.join(contribs)
-            sys.exit()
-for pk in skipped:
-    o.Contribs.remove(pk)
-o.verbose=int(o.verbose)
-results ={}
-for pk in o.Packages:
-    print "Running Test on Official Package: %s" % pk
-    test_dir = os.path.join("Packages",pk,"Test")
-    try:
-        lst = os.listdir(test_dir)
-    except:
-        test_dir = os.path.join("Packages",pk,"test")
-        lst = os.listdir(test_dir)
-    tmp = run_dir(test_dir,lst)
-    for k in tmp.keys():
-        results[k]=tmp[k]
-for pk in o.Contribs:
-    print "Running Test on Contributed Package: %s" % pk
-    test_dir = os.path.join("contrib",pk,"Test")
-    try:
-        lst = os.listdir(test_dir)
-    except:
-        test_dir = os.path.join("contrib",pk,"test")
-        lst = os.listdir(test_dir)
-    tmp = run_dir(test_dir,lst)
-    for k in tmp.keys():
-        results[k]=tmp[k]
-
-
-
-import cdat_info
-Packages=[]
-OS=[]
-Versions=[]
-Machines=[]
-CDATVersions=[]
-#code to display nicely all the results
-if o.query_mode:
-    for test in results.keys():
-        pnm =test[0]
-        if not pnm in Packages:
-            Packages.append(pnm)
-        CDATVersions=results[test]
-        oses = CDATVersions.get(str(cdat_info.version()),{})
-        for aos in oses.keys():
-            if not aos in OS:
-                OS.append(aos)
-            versions = oses[aos]
-            for v in versions.keys():
-                syst = versions[v]
-                for asys in syst:
-                    full = "%s_%s_%s" % (aos,v,asys)
-                    if not full in Versions:
-                        Versions.append(full)
-                    res = syst[asys]
-                    machines = res["machines"]
-                    for m in machines:
-                        if not m in Machines:
-                            Machines.append(m)
-    print 'Your version:',cdat_info.version()
-    print 'Total Test:',len(results.keys())
-    print 'Total Packages:',len(Packages)
-    print 'Total OS:',len(OS),'---',', '.join(OS)
-    print 'Total OS Versions:',len(Versions)
-    print 'Total Independent Machines:',len(Machines)
-## else:
-##     for test_dir,test in results.keys():
-##          print '\n\n'
-##          fn = test_dir+test
-##          print fn,'--------------'
-##          tr = results[test_dir,test]
-##          for t in tr:
-##               print '\t',t,':  ' ,tr[t] 
diff --git a/tests/cdat/test_exsrc_ok.py b/tests/cdat/test_exsrc_ok.py
deleted file mode 100644
index 923dfc9d7a798475e7e4de7dcf7e270583b43b0d..0000000000000000000000000000000000000000
--- a/tests/cdat/test_exsrc_ok.py
+++ /dev/null
@@ -1,107 +0,0 @@
-""" Test external packages dependencies for CDAT
-Prints out Packages that need to be installed and why
-"""
-import sys,os
-
-## Test 1: Pyfort
-min_ver=8.5
-a=os.popen4(sys.prefix+'/bin/pyfort -V')[1].readlines()
-sp=a[0].split()
-if sp[0]!='Pyfort':
-    print 'Pyfort : Not Present in your python distribution'
-elif float(sp[1])<min_ver:
-    print 'Pyfort : Version '+str(min_ver)+' minimum is required, you have: '+sp[1]
-
-## Test 2: Numeric
-min_ver=23.1
-try:
-    import Numeric
-    if float(Numeric.__version__)<min_ver:
-        print 'Numeric : Version '+str(min_ver)+' minimum is required, you have: '+Numeric.__version__
-except:
-    print 'Numeric : Not Present in your python distribution'
-
-## Test 3: Pmw
-min_ver=1.2
-try:
-    import Pmw
-    if float(Pmw.version())<min_ver:
-        print 'Pmw : Version '+str(min_ver)+' minimum is required, you have: '+Pmw.version()
-except:
-    print 'Pmw : Not Present in your python distribution'
-
-## Test 4: gplot
-a=os.popen4('which gplot')[1].readlines()[0]
-if a.find('not found')>-1:
-    print 'gplot : Not present on your system'
-
-## Test 5: xgks
-if not os.path.exists(sys.prefix+'/lib/xgksfonts'):
-    print 'xgks : xgksfonts directory not present in your python distribution'
-
-## Test 6: gifsicle
-a=os.popen4('which gifsicle')[1].readlines()[0]
-if a.find('not found')>-1:
-    print 'gifsicle : Not present on your system'
-
-## Test 7: ghostscript and  fonts
-a=os.popen4('which gs')[1].readlines()[0]
-if a.find('not found')>-1:
-    print 'ghostscript : Not present on your system'
-else:
-    jpeg=0
-    png=0
-    a=os.popen4('gs -h')[1].readlines()
-    while a.pop(0).find('Available devices:')<0:
-        continue
-    for l in a:
-        s=l.strip().split()
-        if 'jpeg' in s:
-            jpeg=1
-        if 'png16' in s:
-            png=1
-            
-    font=0
-    a=os.popen4('gs -h')[1].readlines()
-    while a.pop(0).find('Search path:')<0:
-        continue
-    for l in a:
-        if l[0]==' ': # path lines starts with blank
-            s=l.strip().split(':')
-            for p in s:
-                #print os.listdir(p.strip())
-                if os.path.exists(p.strip()+'/n022003l.afm'):
-                    font=1
-        else:
-            break
-    if jpeg==0 and png==0 and font==0:
-        print 'ghostscript : no jpeg nor png support built, missing extra fonts'
-    elif jpeg==0 and png==0:
-        print 'ghostscript : no jpeg nor png support built'
-    elif jpeg==0:
-        print 'ghostscript : no jpeg support built'
-    elif png==0:
-        print 'ghostscript : no png support built'
-    elif font==0:
-        print 'ghostscript : extra fonts not installed'
-            
-## Test 8: Netpbm/pbmplus
-a=os.popen4('which ppmtogif')[1].readlines()[0]
-if a.find('not found')>-1:
-    if sys.platform in ['linux2','darwin','cygwin']:
-        print 'netpbm : Not present on your system'
-    else:
-        print 'pbmplus : Not present on your system'
-
-
-## Test 9: R libraries (not python module)
-a=os.popen4('which R')[1].readlines()[0]
-if a.find('not found')>-1:
-    print 'R : Not present on your system'
-
-## Test 10: VTK
-try:
-    import vtk
-except:
-       print 'VTK : Not present on your Python'
- 
diff --git a/tests/test_script b/tests/test_script
deleted file mode 100755
index 883bb3b5b7bcf8ce799ef3f9a3d465f7cf261c4b..0000000000000000000000000000000000000000
--- a/tests/test_script
+++ /dev/null
@@ -1,31 +0,0 @@
-#!/bin/sh
-# usage: ./test_script [-v] [targetdir]
-# -v prevents run of vcdat
-#
-unset PYTHONPATH
-unset PYTHONHOME
-unset PYTHONSTARTUP
-if (test "$1" = "-v") then
-    vopt=0; shift;
-else
-    vopt=1
-fi
-if (test $# -eq 0) then 
-    p=`which python`; v=`which vcdat`
-else
-    here=`pwd`;
-    cd $1/bin;
-    pdir=`pwd`;
-    p=$pdir/python; v=$pdir/vcdat;
-    cd $here
-fi
-prefix=`${p} -c "import sys; print sys.exec_prefix"`
-echo "Testing $p"
-(cd Packages/cdms/Test; $p cdtest.py) 
-if (test $vopt -eq 1) then
-    echo "Testing $v";
-    $v
-fi
-echo "-------------------------------------------------------------------"
-echo "Tests completed."
-
diff --git a/uvcdatspt/scripts/MHTScreenshots.py b/uvcdatspt/scripts/MHTScreenshots.py
deleted file mode 100644
index 4396c607432f159a8afc5b85ecffa27ea3f331bb..0000000000000000000000000000000000000000
--- a/uvcdatspt/scripts/MHTScreenshots.py
+++ /dev/null
@@ -1,170 +0,0 @@
-try: paraview.simple
-except: from paraview.simple import *
-
-import sys
-import os
-import paraview
-
-if len(sys.argv) != 3:
-    print 'Usage: pvbatch --symmetric MHTScreenshots.py <output file name> "<input file names>"'
-    sys.exit(1)
-
-print 'input file names are: ', sys.argv[2]
-print 'output file name is: ', sys.argv[1]
-
-# trying to import the library where I can specify the global and subcontrollers
-try:
-    import libvtkParallelPython as vtkParallel # requires LD_LIBRARY_PATH being properly set
-except ImportError:
-    import vtkParallelPython as vtkParallel # for a static build, i.e. jaguarpf, use this instead and don't worry about LD_LIBRARY_PATH
-
-paraview.options.batch = True # this may not be necessary
-paraview.simple._DisableFirstRenderCameraReset()
-
-def CreateTimeCompartments(globalController, timeCompartmentSize):
-    if globalController.GetNumberOfProcesses() == 1:
-        print 'single process'
-        return
-    elif globalController.GetNumberOfProcesses() % timeCompartmentSize != 0:
-        print 'number of processes must be an integer multiple of time compartment size'
-        return
-    elif timeCompartmentSize == globalController.GetNumberOfProcesses():
-        return globalController
-
-    gid = globalController.GetLocalProcessId()
-    timeCompartmentGroupId = int (gid / timeCompartmentSize )
-    newController = globalController.PartitionController(timeCompartmentGroupId, gid % timeCompartmentSize)
-    # must unregister if the reference count is greater than 1
-    if newController.GetReferenceCount() > 1:
-        newController.UnRegister(None)
-
-    #print gid, timeCompartmentGroupId, gid % timeCompartmentSize
-    print gid, ' of global comm is ', newController.GetLocalProcessId()
-    globalController.SetGlobalController(newController)
-    return newController
-
-def CheckReader(reader):
-    if hasattr(reader, "FileName") == False:
-        print "ERROR: Don't know how to set file name for ", reader.SMProxy.GetXMLName()
-        sys.exit(-1)
-
-    if hasattr(reader, "TimestepValues") == False:
-        print "ERROR: ", reader.SMProxy.GetXMLName(), " doesn't have time information"
-        sys.exit(-1)
-
-def CreateControllers(timeCompartmentSize):
-    pm = paraview.servermanager.vtkProcessModule.GetProcessModule()
-    globalController = pm.GetGlobalController()
-    if timeCompartmentSize > globalController.GetNumberOfProcesses():
-        timeCompartmentSize = globalController.GetNumberOfProcesses()
-
-    temporalController = CreateTimeCompartments(globalController, timeCompartmentSize)
-    return globalController, temporalController, timeCompartmentSize
-
-currentTimeStep = -1
-def UpdateCurrentTimeStep(globalController, timeCompartmentSize):
-    global currentTimeStep
-    if currentTimeStep == -1:
-        currentTimeStep = globalController.GetLocalProcessId() / timeCompartmentSize
-        return currentTimeStep
-
-    numTimeStepsPerIteration = globalController.GetNumberOfProcesses() / timeCompartmentSize
-    currentTimeStep = currentTimeStep + numTimeStepsPerIteration
-    return currentTimeStep
-
-def WriteImages(currentTimeStep, currentTime, views):
-    for view in views:
-        filename = view.tpFileName.replace("%t", str(currentTimeStep))
-        view.ViewTime = currentTime
-        WriteImage(filename, view, Magnification=view.tpMagnification)
-
-def WriteFiles(currentTimeStep, currentTime, writers):
-    for writer in writers:
-        originalfilename = writer.FileName
-        fname = originalfilename.replace("%t", str(currentTimeStep))
-        writer.FileName = fname
-        writer.UpdatePipeline(currentTime)
-        writer.FileName = originalfilename
-
-def IterateOverTimeSteps(globalController, timeCompartmentSize, timeSteps, writers, views):
-    currentTimeStep = UpdateCurrentTimeStep(globalController, timeCompartmentSize)
-    while currentTimeStep < len(timeSteps):
-        print globalController.GetLocalProcessId(), " is working on ", currentTimeStep
-        WriteImages(currentTimeStep, timeSteps[currentTimeStep], views)
-        WriteFiles(currentTimeStep, timeSteps[currentTimeStep], writers)
-        currentTimeStep = UpdateCurrentTimeStep(globalController, timeCompartmentSize)
-
-def CreateReader(ctor, args, fileInfo):
-    "Creates a reader, checks if it can be used, and sets the filenames"
-    reader = ctor()
-    CheckReader(reader)
-    import glob
-    files = glob.glob(fileInfo)
-    files.sort() # assume there is a logical ordering of the filenames that corresponds to time ordering
-    reader.FileName = files
-    for a in args:
-        s = "reader."+a
-        exec (s)
-
-    return reader
-
-def CreateWriter(ctor, filename, tp_writers):
-    writer = ctor()
-    writer.FileName = filename
-    tp_writers.append(writer)
-    return writer
-
-def CreateView(proxy_ctor, filename, magnification, width, height, tp_views):
-    view = proxy_ctor()
-    view.add_attribute("tpFileName", filename)
-    view.add_attribute("tpMagnification", magnification)
-    tp_views.append(view)
-    view.ViewSize = [width, height]
-    return view
-
-tp_writers = []
-tp_views = []
-# ==================== end of specialized temporal parallelism sections ==================
-
-timeCompartmentSize = 8
-globalController, temporalController, timeCompartmentSize = CreateControllers(timeCompartmentSize)
-
-in_msf_moc = CreateReader( MHTFileSeriesReader, [], sys.argv[2] )
-timeSteps = GetActiveSource().TimestepValues if len(GetActiveSource().TimestepValues)!=0 else [0]
-
-XYChartView1 = CreateView( CreateXYPlotView, sys.argv[1], 1, 549, 583, tp_views )
-XYChartView1.ShowAxis = [1, 1, 0, 0]
-XYChartView1.ShowAxisGrid = [1, 1, 0, 0]
-XYChartView1.AxisLabelsBottom = []
-XYChartView1.LegendLocation = 1
-XYChartView1.AxisLabelsLeft = []
-XYChartView1.ViewTime = 0.0
-XYChartView1.ShowLegend = 1
-XYChartView1.AxisRange = [0.0, 1.0, 0.0, 1.0, 0.0, 1.0, 0.0, 1.0]
-XYChartView1.AxisTitleFont = ['Arial', '12', '1', '0', 'Arial', '12', '1', '0', 'Arial', '12', '1', '0', 'Arial', '12', '1', '0']
-XYChartView1.AxisLabelColor = [0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0]
-XYChartView1.AxisTitleColor = [0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.5, 0.0, 0.0, 0.5]
-XYChartView1.ChartTitleColor = [0.0, 0.0, 0.0]
-XYChartView1.ChartTitleAlignment = 1
-XYChartView1.AxisColor = [0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0]
-XYChartView1.AxisLabelsTop = []
-XYChartView1.AxisLabelFont = ['Arial', '12', '0', '0', 'Arial', '12', '0', '0', 'Arial', '12', '0', '0', 'Arial', '12', '0', '0']
-XYChartView1.ShowAxisLabels = [1, 1, 1, 1]
-XYChartView1.AxisLabelNotation = [0, 0, 0, 0]
-XYChartView1.AxisLabelPrecision = [2, 2, 2, 2]
-XYChartView1.AxisGridColor = [0.95, 0.95, 0.95, 0.95, 0.95, 0.95, 0.95, 0.95, 0.95, 0.95, 0.95, 0.95]
-XYChartView1.ChartTitle = ''
-XYChartView1.AxisLabelsRight = []
-XYChartView1.AxisBehavior = [0, 0, 0, 0]
-XYChartView1.AxisTitle = ['', '', '', '']
-XYChartView1.ChartTitleFont = ['Arial', '14', '0', '0']
-XYChartView1.AxisLogScale = [0, 0, 0, 0]
-
-DataRepresentation1 = Show() #GetDisplayProperties(in_msf_moc)
-DataRepresentation1.XArrayName = 'reader_mht_global'
-DataRepresentation1.SeriesVisibility = ['vtkOriginalIndices', '0']
-DataRepresentation1.SeriesVisibility = ['reader_mht_global', '1']
-
-IterateOverTimeSteps(globalController, timeCompartmentSize, timeSteps, tp_writers, tp_views)
-
-print 'ending'
diff --git a/uvcdatspt/scripts/MHTTemporalStatistics.py b/uvcdatspt/scripts/MHTTemporalStatistics.py
deleted file mode 100644
index 18cfca03c5f882e42cf11147e235f9120b618acd..0000000000000000000000000000000000000000
--- a/uvcdatspt/scripts/MHTTemporalStatistics.py
+++ /dev/null
@@ -1,26 +0,0 @@
-print 'starting'
-import sys
-from paraview.simple import *
-
-if len(sys.argv) < 3:
-    print 'Usage: pvbatch MHTTemporalStatistics.py <output file name> <input file names>'
-    sys.exit(1)
-
-paraview.simple._DisableFirstRenderCameraReset()
-reader = MHTFileSeriesReader()
-print 'input file names are: ', sys.argv[2:len(sys.argv)]
-print 'output file name is: ', sys.argv[1]
-reader.FileName = sys.argv[2:len(sys.argv)]
-
-MultiBlockTemporalStatistics1 = MultiBlockTemporalStatistics()
-MultiBlockTemporalStatistics1.TimeStepType = 0
-MultiBlockTemporalStatistics1.SamplingMethod = 1
-MultiBlockTemporalStatistics1.TimeSpan = 0
-MultiBlockTemporalStatistics1.TimeStepLength = 1
-MultiBlockTemporalStatistics1.TimeCompartmentSize = 8
-
-writer = XMLMultiBlockDataWriter()
-writer.FileName = sys.argv[1]
-writer.UpdatePipeline()
-
-print 'ending'
diff --git a/uvcdatspt/scripts/MOCScreenshots.py b/uvcdatspt/scripts/MOCScreenshots.py
deleted file mode 100644
index 1cb05ea8f06bc5eaf62750883e984e52c1138b56..0000000000000000000000000000000000000000
--- a/uvcdatspt/scripts/MOCScreenshots.py
+++ /dev/null
@@ -1,535 +0,0 @@
-
-try: paraview.simple
-except: from paraview.simple import *
-
-import sys
-import os
-import paraview
-
-if len(sys.argv) != 3:
-    print 'Usage: pvbatch --symmetric MOCScreenshots.py <output file name> "<input file names>"'
-    sys.exit(1)
-
-print 'input file names are: ', sys.argv[2]
-print 'output file name is: ', sys.argv[1]
-
-# trying to import the library where I can specify the global and subcontrollers
-try:
-    import libvtkParallelPython as vtkParallel # requires LD_LIBRARY_PATH being properly set
-except ImportError:
-    import vtkParallelPython as vtkParallel # for a static build, i.e. jaguarpf, use this instead and don't worry about LD_LIBRARY_PATH
-
-paraview.options.batch = True # this may not be necessary
-paraview.simple._DisableFirstRenderCameraReset()
-
-def CreateTimeCompartments(globalController, timeCompartmentSize):
-    if globalController.GetNumberOfProcesses() == 1:
-        print 'single process'
-        return
-    elif globalController.GetNumberOfProcesses() % timeCompartmentSize != 0:
-        print 'number of processes must be an integer multiple of time compartment size'
-        return
-    elif timeCompartmentSize == globalController.GetNumberOfProcesses():
-        return globalController
-
-    gid = globalController.GetLocalProcessId()
-    timeCompartmentGroupId = int (gid / timeCompartmentSize )
-    newController = globalController.PartitionController(timeCompartmentGroupId, gid % timeCompartmentSize)
-    # must unregister if the reference count is greater than 1
-    if newController.GetReferenceCount() > 1:
-        newController.UnRegister(None)
-
-    #print gid, timeCompartmentGroupId, gid % timeCompartmentSize
-    print gid, ' of global comm is ', newController.GetLocalProcessId()
-    globalController.SetGlobalController(newController)
-    return newController
-
-def CheckReader(reader):
-    if hasattr(reader, "FileName") == False:
-        print "ERROR: Don't know how to set file name for ", reader.SMProxy.GetXMLName()
-        sys.exit(-1)
-
-    if hasattr(reader, "TimestepValues") == False:
-        print "ERROR: ", reader.SMProxy.GetXMLName(), " doesn't have time information"
-        sys.exit(-1)
-
-def CreateControllers(timeCompartmentSize):
-    pm = paraview.servermanager.vtkProcessModule.GetProcessModule()
-    globalController = pm.GetGlobalController()
-    if timeCompartmentSize > globalController.GetNumberOfProcesses():
-        timeCompartmentSize = globalController.GetNumberOfProcesses()
-
-    temporalController = CreateTimeCompartments(globalController, timeCompartmentSize)
-    return globalController, temporalController, timeCompartmentSize
-
-currentTimeStep = -1
-def UpdateCurrentTimeStep(globalController, timeCompartmentSize):
-    global currentTimeStep
-    if currentTimeStep == -1:
-        currentTimeStep = globalController.GetLocalProcessId() / timeCompartmentSize
-        return currentTimeStep
-
-    numTimeStepsPerIteration = globalController.GetNumberOfProcesses() / timeCompartmentSize
-    currentTimeStep = currentTimeStep + numTimeStepsPerIteration
-    return currentTimeStep
-
-def WriteImages(currentTimeStep, currentTime, views):
-    for view in views:
-        filename = view.tpFileName.replace("%t", str(currentTimeStep))
-        view.ViewTime = currentTime
-        WriteImage(filename, view, Magnification=view.tpMagnification)
-
-def WriteFiles(currentTimeStep, currentTime, writers):
-    for writer in writers:
-        originalfilename = writer.FileName
-        fname = originalfilename.replace("%t", str(currentTimeStep))
-        writer.FileName = fname
-        writer.UpdatePipeline(currentTime)
-        writer.FileName = originalfilename
-
-def IterateOverTimeSteps(globalController, timeCompartmentSize, timeSteps, writers, views):
-    currentTimeStep = UpdateCurrentTimeStep(globalController, timeCompartmentSize)
-    while currentTimeStep < len(timeSteps):
-        print globalController.GetLocalProcessId(), " is working on ", currentTimeStep
-        WriteImages(currentTimeStep, timeSteps[currentTimeStep], views)
-        WriteFiles(currentTimeStep, timeSteps[currentTimeStep], writers)
-        currentTimeStep = UpdateCurrentTimeStep(globalController, timeCompartmentSize)
-
-def CreateReader(ctor, args, fileInfo):
-    "Creates a reader, checks if it can be used, and sets the filenames"
-    reader = ctor()
-    CheckReader(reader)
-    import glob
-    files = glob.glob(fileInfo)
-    files.sort() # assume there is a logical ordering of the filenames that corresponds to time ordering
-    reader.FileName = files
-    for a in args:
-        s = "reader."+a
-        exec (s)
-
-    return reader
-
-def CreateWriter(ctor, filename, tp_writers):
-    writer = ctor()
-    writer.FileName = filename
-    tp_writers.append(writer)
-    return writer
-
-def CreateView(proxy_ctor, filename, magnification, width, height, tp_views):
-    view = proxy_ctor()
-    view.add_attribute("tpFileName", filename)
-    view.add_attribute("tpMagnification", magnification)
-    tp_views.append(view)
-    view.ViewSize = [width, height]
-    return view
-
-tp_writers = []
-tp_views = []
-# ==================== end of specialized temporal parallelism sections ==================
-
-timeCompartmentSize = 16
-globalController, temporalController, timeCompartmentSize = CreateControllers(timeCompartmentSize)
-
-RenderView1 = CreateView( CreateRenderView, sys.argv[1], 1, 549, 583, tp_views )
-RenderView1.LightSpecularColor = [1.0, 1.0, 1.0]
-RenderView1.InteractionMode = '3D'
-RenderView1.UseTexturedBackground = 0
-RenderView1.UseLight = 1
-RenderView1.CameraPosition = [15.0, -2624.999755859375, 14496.62787197619]
-RenderView1.FillLightKFRatio = 3.0
-RenderView1.Background2 = [0.0, 0.0, 0.16470588235294117]
-RenderView1.FillLightAzimuth = -10.0
-RenderView1.LODResolution = 50.0
-RenderView1.BackgroundTexture = []
-RenderView1.KeyLightAzimuth = 10.0
-RenderView1.StencilCapable = 1
-RenderView1.LightIntensity = 1.0
-RenderView1.CameraFocalPoint = [15.0, -2624.999755859375, 0.0]
-RenderView1.ImageReductionFactor = 2
-RenderView1.CameraViewAngle = 30.0
-RenderView1.CameraParallelScale = 3766.3151510583625
-RenderView1.EyeAngle = 2.0
-RenderView1.HeadLightKHRatio = 3.0
-RenderView1.StereoRender = 0
-RenderView1.KeyLightIntensity = 0.75
-RenderView1.BackLightAzimuth = 110.0
-RenderView1.OrientationAxesInteractivity = 0
-RenderView1.UseInteractiveRenderingForSceenshots = 0
-RenderView1.UseOffscreenRendering = 0
-RenderView1.Background = [1.0, 1.0, 1.0]
-RenderView1.UseOffscreenRenderingForScreenshots = 1
-RenderView1.NonInteractiveRenderDelay = 2
-RenderView1.CenterOfRotation = [15.0, -2624.999755859375, 0.0]
-RenderView1.CameraParallelProjection = 0
-RenderView1.CompressorConfig = 'vtkSquirtCompressor 0 3'
-RenderView1.HeadLightWarmth = 0.5
-RenderView1.MaximumNumberOfPeels = 4
-RenderView1.LightDiffuseColor = [1.0, 1.0, 1.0]
-RenderView1.StereoType = 'Red-Blue'
-RenderView1.DepthPeeling = 1
-RenderView1.BackLightKBRatio = 3.5
-RenderView1.StereoCapableWindow = 1
-RenderView1.CameraViewUp = [0.0, 1.0, 0.0]
-RenderView1.LightType = 'HeadLight'
-RenderView1.LightAmbientColor = [1.0, 1.0, 1.0]
-RenderView1.RemoteRenderThreshold = 3.0
-RenderView1.KeyLightElevation = 50.0
-RenderView1.CenterAxesVisibility = 0
-RenderView1.MaintainLuminance = 0
-RenderView1.StillRenderImageReductionFactor = 1
-RenderView1.BackLightWarmth = 0.5
-RenderView1.FillLightElevation = -75.0
-RenderView1.MultiSamples = 0
-RenderView1.FillLightWarmth = 0.4
-RenderView1.AlphaBitPlanes = 1
-RenderView1.LightSwitch = 0
-RenderView1.OrientationAxesVisibility = 0
-RenderView1.CameraClippingRange = [14351.66159325643, 14714.077290055833]
-RenderView1.BackLightElevation = 0.0
-RenderView1.ViewTime = 0.0
-RenderView1.OrientationAxesOutlineColor = [1.0, 1.0, 1.0]
-RenderView1.LODThreshold = 5.0
-RenderView1.CollectGeometryThreshold = 100.0
-RenderView1.UseGradientBackground = 0
-RenderView1.KeyLightWarmth = 0.6
-RenderView1.OrientationAxesLabelColor = [1.0, 1.0, 1.0]
-
-in_msf_moc = CreateReader( MOCFileSeriesReader, [],  sys.argv[2])
-timeSteps = GetActiveSource().TimestepValues if len(GetActiveSource().TimestepValues)!=0 else [0]
-Threshold1 = Threshold( guiName="Threshold1", Scalars=['POINTS', 'reader_moc_global'], ThresholdRange=[-1000.0, 592.3663330078125], AllScalars=1 )
-
-Transform1 = Transform( guiName="Transform1", Transform="Transform" )
-Transform1.Transform.Scale = [40.0, -1.0, 1.0]
-Transform1.Transform.Rotate = [0.0, 0.0, 0.0]
-Transform1.Transform.Translate = [0.0, 0.0, 0.0]
-
-a1_reader_moc_global_PiecewiseFunction = CreatePiecewiseFunction( Points=[0.0, 0.0, 1.0, 1.0] )
-
-a1_reader_moc_global_PVLookupTable = GetLookupTableForArray( "reader_moc_global", 1, Discretize=1, RGBPoints=[-151.5101776123047, 0.23, 0.299, 0.754, 592.3663330078125, 0.706, 0.016, 0.15], UseLogScale=0, VectorComponent=0, NanColor=[0.25, 0.0, 0.0], NumberOfTableValues=256, ColorSpace='Diverging', VectorMode='Magnitude', HSVWrap=0, ScalarRangeInitialized=1.0, LockScalarRange=0 )
-
-SetActiveSource(in_msf_moc)
-DataRepresentation1 = Show()
-DataRepresentation1.CubeAxesZAxisVisibility = 1
-DataRepresentation1.SelectionPointLabelColor = [0.5, 0.5, 0.5]
-DataRepresentation1.SelectionPointFieldDataArrayName = 'vtkOriginalPointIds'
-DataRepresentation1.SuppressLOD = 0
-DataRepresentation1.CubeAxesXGridLines = 0
-DataRepresentation1.CubeAxesYAxisTickVisibility = 1
-DataRepresentation1.Position = [0.0, 0.0, 0.0]
-DataRepresentation1.BackfaceRepresentation = 'Follow Frontface'
-DataRepresentation1.SelectionOpacity = 1.0
-DataRepresentation1.SelectionPointLabelShadow = 0
-DataRepresentation1.CubeAxesYGridLines = 0
-DataRepresentation1.OrientationMode = 'Direction'
-DataRepresentation1.Source.TipResolution = 6
-DataRepresentation1.ScaleMode = 'No Data Scaling Off'
-DataRepresentation1.Diffuse = 1.0
-DataRepresentation1.SelectionUseOutline = 0
-DataRepresentation1.CubeAxesZTitle = 'Z-Axis'
-DataRepresentation1.Specular = 0.1
-DataRepresentation1.SelectionVisibility = 1
-DataRepresentation1.InterpolateScalarsBeforeMapping = 1
-DataRepresentation1.CubeAxesZAxisTickVisibility = 1
-DataRepresentation1.Origin = [0.0, 0.0, 0.0]
-DataRepresentation1.CubeAxesVisibility = 0
-DataRepresentation1.Scale = [1.0, 1.0, 1.0]
-DataRepresentation1.SelectionCellLabelJustification = 'Left'
-DataRepresentation1.DiffuseColor = [1.0, 1.0, 1.0]
-DataRepresentation1.SelectionCellLabelOpacity = 1.0
-DataRepresentation1.Source = "Arrow"
-DataRepresentation1.Source.Invert = 0
-DataRepresentation1.Masking = 0
-DataRepresentation1.Opacity = 1.0
-DataRepresentation1.LineWidth = 1.0
-DataRepresentation1.MeshVisibility = 0
-DataRepresentation1.Visibility = 0
-DataRepresentation1.SelectionCellLabelFontSize = 18
-DataRepresentation1.CubeAxesCornerOffset = 0.0
-DataRepresentation1.SelectionPointLabelJustification = 'Left'
-DataRepresentation1.Ambient = 0.0
-DataRepresentation1.SelectOrientationVectors = ''
-DataRepresentation1.CubeAxesTickLocation = 'Inside'
-DataRepresentation1.BackfaceDiffuseColor = [1.0, 1.0, 1.0]
-DataRepresentation1.CubeAxesYAxisVisibility = 1
-DataRepresentation1.SelectionPointLabelFontFamily = 'Arial'
-DataRepresentation1.Source.ShaftResolution = 6
-DataRepresentation1.CubeAxesFlyMode = 'Closest Triad'
-DataRepresentation1.SelectScaleArray = ''
-DataRepresentation1.CubeAxesYTitle = 'Y-Axis'
-DataRepresentation1.ColorAttributeType = 'POINT_DATA'
-DataRepresentation1.SpecularPower = 100.0
-DataRepresentation1.Texture = []
-DataRepresentation1.SelectionCellLabelShadow = 0
-DataRepresentation1.AmbientColor = [1.0, 1.0, 1.0]
-DataRepresentation1.MapScalars = 1
-DataRepresentation1.PointSize = 2.0
-DataRepresentation1.Source.TipLength = 0.35
-DataRepresentation1.SelectionCellLabelFormat = ''
-DataRepresentation1.Scaling = 0
-DataRepresentation1.StaticMode = 0
-DataRepresentation1.SelectionCellLabelColor = [0.0, 1.0, 0.0]
-DataRepresentation1.Source.TipRadius = 0.1
-DataRepresentation1.EdgeColor = [0.0, 0.0, 0.5000076295109483]
-DataRepresentation1.CubeAxesXAxisTickVisibility = 1
-DataRepresentation1.SelectionCellLabelVisibility = 0
-DataRepresentation1.NonlinearSubdivisionLevel = 1
-DataRepresentation1.CubeAxesColor = [1.0, 1.0, 1.0]
-DataRepresentation1.Representation = 'Surface'
-DataRepresentation1.CustomBounds = [0.0, 1.0, 0.0, 1.0, 0.0, 1.0]
-DataRepresentation1.CubeAxesXAxisMinorTickVisibility = 1
-DataRepresentation1.Orientation = [0.0, 0.0, 0.0]
-DataRepresentation1.CubeAxesXTitle = 'X-Axis'
-DataRepresentation1.CubeAxesInertia = 1
-DataRepresentation1.BackfaceOpacity = 1.0
-DataRepresentation1.SelectionCellFieldDataArrayName = 'vtkOriginalCellIds'
-DataRepresentation1.SelectionColor = [1.0, 0.0, 1.0]
-DataRepresentation1.SelectionPointLabelVisibility = 0
-DataRepresentation1.SelectionPointLabelFontSize = 18
-DataRepresentation1.ScaleFactor = 1.0
-DataRepresentation1.BackfaceAmbientColor = [1.0, 1.0, 1.0]
-DataRepresentation1.Source.ShaftRadius = 0.03
-DataRepresentation1.SelectMaskArray = ''
-DataRepresentation1.SelectionLineWidth = 2.0
-DataRepresentation1.CubeAxesZAxisMinorTickVisibility = 1
-DataRepresentation1.CubeAxesXAxisVisibility = 1
-DataRepresentation1.Interpolation = 'Gouraud'
-DataRepresentation1.SelectionCellLabelFontFamily = 'Arial'
-DataRepresentation1.SelectionCellLabelItalic = 0
-DataRepresentation1.CubeAxesYAxisMinorTickVisibility = 1
-DataRepresentation1.CubeAxesZGridLines = 0
-DataRepresentation1.SelectionPointLabelFormat = ''
-DataRepresentation1.SelectionPointLabelOpacity = 1.0
-DataRepresentation1.Pickable = 1
-DataRepresentation1.CustomBoundsActive = [0, 0, 0]
-DataRepresentation1.SelectionRepresentation = 'Wireframe'
-DataRepresentation1.SelectionPointLabelBold = 0
-DataRepresentation1.ColorArrayName = 'reader_moc_global'
-DataRepresentation1.SelectionPointLabelItalic = 0
-DataRepresentation1.SpecularColor = [1.0, 1.0, 1.0]
-DataRepresentation1.LookupTable = a1_reader_moc_global_PVLookupTable
-DataRepresentation1.SelectionPointSize = 5.0
-DataRepresentation1.SelectionCellLabelBold = 0
-DataRepresentation1.Orient = 0
-
-SetActiveSource(Threshold1)
-DataRepresentation2 = Show()
-DataRepresentation2.CubeAxesZAxisVisibility = 1
-DataRepresentation2.SelectionPointLabelColor = [0.5, 0.5, 0.5]
-DataRepresentation2.SelectionPointFieldDataArrayName = 'vtkOriginalPointIds'
-DataRepresentation2.SuppressLOD = 0
-DataRepresentation2.CubeAxesXGridLines = 0
-DataRepresentation2.CubeAxesYAxisTickVisibility = 1
-DataRepresentation2.Position = [0.0, 0.0, 0.0]
-DataRepresentation2.BackfaceRepresentation = 'Follow Frontface'
-DataRepresentation2.SelectionOpacity = 1.0
-DataRepresentation2.SelectionPointLabelShadow = 0
-DataRepresentation2.CubeAxesYGridLines = 0
-DataRepresentation2.OrientationMode = 'Direction'
-DataRepresentation2.Source.TipResolution = 6
-DataRepresentation2.ScaleMode = 'No Data Scaling Off'
-DataRepresentation2.Diffuse = 1.0
-DataRepresentation2.SelectionUseOutline = 0
-DataRepresentation2.SelectionPointLabelFormat = ''
-DataRepresentation2.CubeAxesZTitle = 'Z-Axis'
-DataRepresentation2.Specular = 0.1
-DataRepresentation2.SelectionVisibility = 1
-DataRepresentation2.InterpolateScalarsBeforeMapping = 1
-DataRepresentation2.CubeAxesZAxisTickVisibility = 1
-DataRepresentation2.Origin = [0.0, 0.0, 0.0]
-DataRepresentation2.CubeAxesVisibility = 0
-DataRepresentation2.Scale = [1.0, 1.0, 1.0]
-DataRepresentation2.SelectionCellLabelJustification = 'Left'
-DataRepresentation2.DiffuseColor = [1.0, 1.0, 1.0]
-DataRepresentation2.SelectionCellLabelOpacity = 1.0
-DataRepresentation2.CubeAxesInertia = 1
-DataRepresentation2.Source = "Arrow"
-DataRepresentation2.Source.Invert = 0
-DataRepresentation2.Masking = 0
-DataRepresentation2.Opacity = 1.0
-DataRepresentation2.LineWidth = 1.0
-DataRepresentation2.MeshVisibility = 0
-DataRepresentation2.Visibility = 0
-DataRepresentation2.SelectionCellLabelFontSize = 18
-DataRepresentation2.CubeAxesCornerOffset = 0.0
-DataRepresentation2.SelectionPointLabelJustification = 'Left'
-DataRepresentation2.SelectionPointLabelVisibility = 0
-DataRepresentation2.SelectOrientationVectors = ''
-DataRepresentation2.CubeAxesTickLocation = 'Inside'
-DataRepresentation2.BackfaceDiffuseColor = [1.0, 1.0, 1.0]
-DataRepresentation2.CubeAxesYAxisVisibility = 1
-DataRepresentation2.SelectionPointLabelFontFamily = 'Arial'
-DataRepresentation2.Source.ShaftResolution = 6
-DataRepresentation2.CubeAxesFlyMode = 'Closest Triad'
-DataRepresentation2.SelectScaleArray = ''
-DataRepresentation2.CubeAxesYTitle = 'Y-Axis'
-DataRepresentation2.ColorAttributeType = 'POINT_DATA'
-DataRepresentation2.SpecularPower = 100.0
-DataRepresentation2.Texture = []
-DataRepresentation2.SelectionCellLabelShadow = 0
-DataRepresentation2.AmbientColor = [1.0, 1.0, 1.0]
-DataRepresentation2.MapScalars = 1
-DataRepresentation2.PointSize = 2.0
-DataRepresentation2.Source.TipLength = 0.35
-DataRepresentation2.SelectionCellLabelFormat = ''
-DataRepresentation2.Scaling = 0
-DataRepresentation2.StaticMode = 0
-DataRepresentation2.SelectionCellLabelColor = [0.0, 1.0, 0.0]
-DataRepresentation2.Source.TipRadius = 0.1
-DataRepresentation2.EdgeColor = [0.0, 0.0, 0.5000076295109483]
-DataRepresentation2.CubeAxesXAxisTickVisibility = 1
-DataRepresentation2.SelectionCellLabelVisibility = 0
-DataRepresentation2.NonlinearSubdivisionLevel = 1
-DataRepresentation2.CubeAxesColor = [1.0, 1.0, 1.0]
-DataRepresentation2.Representation = 'Surface'
-DataRepresentation2.CustomBounds = [0.0, 1.0, 0.0, 1.0, 0.0, 1.0]
-DataRepresentation2.CubeAxesXAxisMinorTickVisibility = 1
-DataRepresentation2.Orientation = [0.0, 0.0, 0.0]
-DataRepresentation2.CubeAxesXTitle = 'X-Axis'
-DataRepresentation2.ScalarOpacityUnitDistance = 287.4628538795667
-DataRepresentation2.BackfaceOpacity = 1.0
-DataRepresentation2.SelectionCellFieldDataArrayName = 'vtkOriginalCellIds'
-DataRepresentation2.SelectionColor = [1.0, 0.0, 1.0]
-DataRepresentation2.Ambient = 0.0
-DataRepresentation2.SelectionPointLabelFontSize = 18
-DataRepresentation2.ScaleFactor = 1.0
-DataRepresentation2.BackfaceAmbientColor = [1.0, 1.0, 1.0]
-DataRepresentation2.Source.ShaftRadius = 0.03
-DataRepresentation2.ScalarOpacityFunction = a1_reader_moc_global_PiecewiseFunction
-DataRepresentation2.SelectMaskArray = ''
-DataRepresentation2.SelectionLineWidth = 2.0
-DataRepresentation2.CubeAxesZAxisMinorTickVisibility = 1
-DataRepresentation2.CubeAxesXAxisVisibility = 1
-DataRepresentation2.Interpolation = 'Gouraud'
-DataRepresentation2.SelectMapper = 'Projected tetra'
-DataRepresentation2.SelectionCellLabelFontFamily = 'Arial'
-DataRepresentation2.SelectionCellLabelItalic = 0
-DataRepresentation2.CubeAxesYAxisMinorTickVisibility = 1
-DataRepresentation2.CubeAxesZGridLines = 0
-DataRepresentation2.ExtractedBlockIndex = 0
-DataRepresentation2.SelectionPointLabelOpacity = 1.0
-DataRepresentation2.Pickable = 1
-DataRepresentation2.CustomBoundsActive = [0, 0, 0]
-DataRepresentation2.SelectionRepresentation = 'Wireframe'
-DataRepresentation2.SelectionPointLabelBold = 0
-DataRepresentation2.ColorArrayName = 'reader_moc_global'
-DataRepresentation2.SelectionPointLabelItalic = 0
-DataRepresentation2.SpecularColor = [1.0, 1.0, 1.0]
-DataRepresentation2.LookupTable = a1_reader_moc_global_PVLookupTable
-DataRepresentation2.SelectionPointSize = 5.0
-DataRepresentation2.SelectionCellLabelBold = 0
-DataRepresentation2.Orient = 0
-
-SetActiveSource(Transform1)
-DataRepresentation3 = Show()
-DataRepresentation3.CubeAxesZAxisVisibility = 1
-DataRepresentation3.SelectionPointLabelColor = [0.5, 0.5, 0.5]
-DataRepresentation3.SelectionPointFieldDataArrayName = 'vtkOriginalPointIds'
-DataRepresentation3.SuppressLOD = 0
-DataRepresentation3.CubeAxesXGridLines = 0
-DataRepresentation3.CubeAxesYAxisTickVisibility = 1
-DataRepresentation3.Position = [0.0, 0.0, 0.0]
-DataRepresentation3.BackfaceRepresentation = 'Follow Frontface'
-DataRepresentation3.SelectionOpacity = 1.0
-DataRepresentation3.SelectionPointLabelShadow = 0
-DataRepresentation3.CubeAxesYGridLines = 0
-DataRepresentation3.OrientationMode = 'Direction'
-DataRepresentation3.Source.TipResolution = 6
-DataRepresentation3.ScaleMode = 'No Data Scaling Off'
-DataRepresentation3.Diffuse = 1.0
-DataRepresentation3.SelectionUseOutline = 0
-DataRepresentation3.SelectionPointLabelFormat = ''
-DataRepresentation3.CubeAxesZTitle = 'Z-Axis'
-DataRepresentation3.Specular = 0.1
-DataRepresentation3.SelectionVisibility = 1
-DataRepresentation3.InterpolateScalarsBeforeMapping = 1
-DataRepresentation3.CubeAxesZAxisTickVisibility = 1
-DataRepresentation3.Origin = [0.0, 0.0, 0.0]
-DataRepresentation3.CubeAxesVisibility = 0
-DataRepresentation3.Scale = [1.0, 1.0, 1.0]
-DataRepresentation3.SelectionCellLabelJustification = 'Left'
-DataRepresentation3.DiffuseColor = [1.0, 1.0, 1.0]
-DataRepresentation3.SelectionCellLabelOpacity = 1.0
-DataRepresentation3.CubeAxesInertia = 1
-DataRepresentation3.Source = "Arrow"
-DataRepresentation3.Source.Invert = 0
-DataRepresentation3.Masking = 0
-DataRepresentation3.Opacity = 1.0
-DataRepresentation3.LineWidth = 1.0
-DataRepresentation3.MeshVisibility = 0
-DataRepresentation3.Visibility = 1
-DataRepresentation3.SelectionCellLabelFontSize = 18
-DataRepresentation3.CubeAxesCornerOffset = 0.0
-DataRepresentation3.SelectionPointLabelJustification = 'Left'
-DataRepresentation3.SelectionPointLabelVisibility = 0
-DataRepresentation3.SelectOrientationVectors = ''
-DataRepresentation3.CubeAxesTickLocation = 'Inside'
-DataRepresentation3.BackfaceDiffuseColor = [1.0, 1.0, 1.0]
-DataRepresentation3.CubeAxesYAxisVisibility = 1
-DataRepresentation3.SelectionPointLabelFontFamily = 'Arial'
-DataRepresentation3.Source.ShaftResolution = 6
-DataRepresentation3.CubeAxesFlyMode = 'Closest Triad'
-DataRepresentation3.SelectScaleArray = ''
-DataRepresentation3.CubeAxesYTitle = 'Y-Axis'
-DataRepresentation3.ColorAttributeType = 'POINT_DATA'
-DataRepresentation3.SpecularPower = 100.0
-DataRepresentation3.Texture = []
-DataRepresentation3.SelectionCellLabelShadow = 0
-DataRepresentation3.AmbientColor = [1.0, 1.0, 1.0]
-DataRepresentation3.MapScalars = 1
-DataRepresentation3.PointSize = 2.0
-DataRepresentation3.Source.TipLength = 0.35
-DataRepresentation3.SelectionCellLabelFormat = ''
-DataRepresentation3.Scaling = 0
-DataRepresentation3.StaticMode = 0
-DataRepresentation3.SelectionCellLabelColor = [0.0, 1.0, 0.0]
-DataRepresentation3.Source.TipRadius = 0.1
-DataRepresentation3.EdgeColor = [0.0, 0.0, 0.5000076295109483]
-DataRepresentation3.CubeAxesXAxisTickVisibility = 1
-DataRepresentation3.SelectionCellLabelVisibility = 0
-DataRepresentation3.NonlinearSubdivisionLevel = 1
-DataRepresentation3.CubeAxesColor = [1.0, 1.0, 1.0]
-DataRepresentation3.Representation = 'Surface'
-DataRepresentation3.CustomBounds = [0.0, 1.0, 0.0, 1.0, 0.0, 1.0]
-DataRepresentation3.CubeAxesXAxisMinorTickVisibility = 1
-DataRepresentation3.Orientation = [0.0, 0.0, 0.0]
-DataRepresentation3.CubeAxesXTitle = 'X-Axis'
-DataRepresentation3.ScalarOpacityUnitDistance = 388.2163580108114
-DataRepresentation3.BackfaceOpacity = 1.0
-DataRepresentation3.SelectionCellFieldDataArrayName = 'vtkOriginalCellIds'
-DataRepresentation3.SelectionColor = [1.0, 0.0, 1.0]
-DataRepresentation3.Ambient = 0.0
-DataRepresentation3.SelectionPointLabelFontSize = 18
-DataRepresentation3.ScaleFactor = 1.0
-DataRepresentation3.BackfaceAmbientColor = [1.0, 1.0, 1.0]
-DataRepresentation3.Source.ShaftRadius = 0.03
-DataRepresentation3.ScalarOpacityFunction = a1_reader_moc_global_PiecewiseFunction
-DataRepresentation3.SelectMaskArray = ''
-DataRepresentation3.SelectionLineWidth = 2.0
-DataRepresentation3.CubeAxesZAxisMinorTickVisibility = 1
-DataRepresentation3.CubeAxesXAxisVisibility = 1
-DataRepresentation3.Interpolation = 'Gouraud'
-DataRepresentation3.SelectMapper = 'Projected tetra'
-DataRepresentation3.SelectionCellLabelFontFamily = 'Arial'
-DataRepresentation3.SelectionCellLabelItalic = 0
-DataRepresentation3.CubeAxesYAxisMinorTickVisibility = 1
-DataRepresentation3.CubeAxesZGridLines = 0
-DataRepresentation3.ExtractedBlockIndex = 0
-DataRepresentation3.SelectionPointLabelOpacity = 1.0
-DataRepresentation3.Pickable = 1
-DataRepresentation3.CustomBoundsActive = [0, 0, 0]
-DataRepresentation3.SelectionRepresentation = 'Wireframe'
-DataRepresentation3.SelectionPointLabelBold = 0
-DataRepresentation3.ColorArrayName = 'reader_moc_global'
-DataRepresentation3.SelectionPointLabelItalic = 0
-DataRepresentation3.SpecularColor = [1.0, 1.0, 1.0]
-DataRepresentation3.LookupTable = a1_reader_moc_global_PVLookupTable
-DataRepresentation3.SelectionPointSize = 5.0
-DataRepresentation3.SelectionCellLabelBold = 0
-DataRepresentation3.Orient = 0
-
-
-
-IterateOverTimeSteps(globalController, timeCompartmentSize, timeSteps, tp_writers, tp_views)
-
-
-print 'ending'
diff --git a/uvcdatspt/scripts/MOCTemporalStatistics.py b/uvcdatspt/scripts/MOCTemporalStatistics.py
deleted file mode 100644
index c6d51900bcd7c028930124a249ac5fe39bfd2e51..0000000000000000000000000000000000000000
--- a/uvcdatspt/scripts/MOCTemporalStatistics.py
+++ /dev/null
@@ -1,26 +0,0 @@
-print 'starting'
-import sys
-from paraview.simple import *
-
-if len(sys.argv) < 3:
-    print 'Usage: pvbatch MOCTemporalStatistics.py <output file name> <input file names>'
-    sys.exit(1)
-
-paraview.simple._DisableFirstRenderCameraReset()
-reader = MOCFileSeriesReader()
-print 'input file names are: ', sys.argv[2:len(sys.argv)]
-print 'output file name is: ', sys.argv[1]
-reader.FileName = sys.argv[2:len(sys.argv)]
-
-MultiBlockTemporalStatistics1 = MultiBlockTemporalStatistics()
-MultiBlockTemporalStatistics1.TimeStepType = 0
-MultiBlockTemporalStatistics1.SamplingMethod = 1
-MultiBlockTemporalStatistics1.TimeSpan = 0
-MultiBlockTemporalStatistics1.TimeStepLength = 1
-MultiBlockTemporalStatistics1.TimeCompartmentSize = 16
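-# Note: by analogy with the other spatio-temporal scripts (see
-# POPGenerateImages.py), the number of MPI processes presumably needs to be
-# a multiple of the TimeCompartmentSize of 16 used here.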
-
-writer = XMLMultiBlockDataWriter()
-writer.FileName = sys.argv[1]
-writer.UpdatePipeline()
-
-print 'ending'
diff --git a/uvcdatspt/scripts/MWehnerTemporalStatistics.py b/uvcdatspt/scripts/MWehnerTemporalStatistics.py
deleted file mode 100644
index d9f2f4c1af18cca9349c784b85ede9d8f1eb35f9..0000000000000000000000000000000000000000
--- a/uvcdatspt/scripts/MWehnerTemporalStatistics.py
+++ /dev/null
@@ -1,47 +0,0 @@
-# Script for computing temporal statistics (average, minimum, maximum
-# and standard deviation) on hopper.nersc.gov. The input is a single
-# file that contains multiple time steps. The time compartment size is
-# a command line argument.
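-# A typical invocation might therefore look like (process count illustrative):
-#   mpirun -np 32 pvbatch --symmetric MWehnerTemporalStatistics.py 8
-# with the trailing 8 being the time compartment size read from sys.argv[1].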
-
-import sys
-import time
-start = time.time()
-
-try: paraview.simple
-except: from paraview.simple import *
-paraview.simple._DisableFirstRenderCameraReset()
-
-import libvtkParallelPython
-import paraview
-pm = paraview.servermanager.vtkProcessModule.GetProcessModule()
-globalController = pm.GetGlobalController()
-pid = globalController.GetLocalProcessId()
-
-tcsize = sys.argv[1]
-
-fileName = "statsmwhenertwod.vtm"
-
-if pid == 0:
-    print 'starting script with tcsize of ', tcsize, ' and output filename using ', fileName
-
-V_cam5_1_amip_run2_cam2_h0_1994_nc = NetCDFReader( FileName=['/global/project/projectdirs/m1517/ACE/cam5.1/control/0.25_degree/monthly/run2/zg_Amon_CAM5.1_0.25degree_control_v1.0_run2_197901-200512.nc'] )
-
-V_cam5_1_amip_run2_cam2_h0_1994_nc.Dimensions = '(plev, lat, lon)'
-V_cam5_1_amip_run2_cam2_h0_1994_nc.SphericalCoordinates = 0
-
-MultiBlockTemporalStatistics1 = MultiBlockTemporalStatistics()
-MultiBlockTemporalStatistics1.TimeStepType = 'Months'
-#MultiBlockTemporalStatistics1.SamplingMethod = 'Consecutive'
-MultiBlockTemporalStatistics1.SamplingMethod = 'Climatology'
-#MultiBlockTemporalStatistics1.TimeSpan = 'Year'
-MultiBlockTemporalStatistics1.TimeSpan = 'AllTimeSteps'
-MultiBlockTemporalStatistics1.TimeCompartmentSize = int(tcsize)
-
-writer = XMLMultiBlockDataWriter()
-writer.FileName = fileName
-
-writer.UpdatePipeline()
-if pid == 0:
-    print 'finished run in ', time.time()-start
-
diff --git a/uvcdatspt/scripts/POPGenerateImages.py b/uvcdatspt/scripts/POPGenerateImages.py
deleted file mode 100644
index 86f61e47fd0bfdc1da2cf709e564e9e4b29875f8..0000000000000000000000000000000000000000
--- a/uvcdatspt/scripts/POPGenerateImages.py
+++ /dev/null
@@ -1,310 +0,0 @@
-# Spatio-temporal script for generating images for POP NetCDF
-# output files. This one pseudo-colors by TEMP. It has a
-# time compartment size of 4 so the number of processes
-# also needs to be a multiple of 4. To run it, do:
-# mpirun -np <numprocs> ./pvbatch --symmetric POPGenerateImages.py
-
-try: paraview.simple
-except: from paraview.simple import *
-
-import sys
-import os
-import paraview
-
-# trying to import the library where I can specify the global and subcontrollers
-try:
-    import libvtkParallelPython as vtkParallel # requires LD_LIBRARY_PATH being properly set
-except ImportError:
-    import vtkParallelPython as vtkParallel # for a static build, i.e. jaguarpf, use this instead and don't worry about LD_LIBRARY_PATH
-
-paraview.options.batch = True # this may not be necessary
-paraview.simple._DisableFirstRenderCameraReset()
-
-def CreateTimeCompartments(globalController, timeCompartmentSize):
-    if globalController.GetNumberOfProcesses() == 1:
-        print 'single process'
-        return
-    elif globalController.GetNumberOfProcesses() % timeCompartmentSize != 0:
-        print 'number of processes must be an integer multiple of time compartment size'
-        return
-    elif timeCompartmentSize == globalController.GetNumberOfProcesses():
-        return globalController
-
-    gid = globalController.GetLocalProcessId()
-    timeCompartmentGroupId = int (gid / timeCompartmentSize )
-    newController = globalController.PartitionController(timeCompartmentGroupId, gid % timeCompartmentSize)
-    # must unregister if the reference count is greater than 1
-    if newController.GetReferenceCount() > 1:
-        newController.UnRegister(None)
-
-    #print gid, timeCompartmentGroupId, gid % timeCompartmentSize
-    print gid, ' of global comm is ', newController.GetLocalProcessId()
-    globalController.SetGlobalController(newController)
-    return newController
-
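-# Worked example of the partitioning above: with 8 processes and a time
-# compartment size of 4, gid/timeCompartmentSize puts ranks 0-3 in
-# compartment 0 and ranks 4-7 in compartment 1, each with local ranks 0-3
-# inside its own subcontroller from PartitionController.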
-def CheckReader(reader):
-    if hasattr(reader, "FileName") == False:
-        print "ERROR: Don't know how to set file name for ", reader.SMProxy.GetXMLName()
-        sys.exit(-1)
-
-    if hasattr(reader, "TimestepValues") == False:
-        print "ERROR: ", reader.SMProxy.GetXMLName(), " doesn't have time information"
-        sys.exit(-1)
-
-def CreateControllers(timeCompartmentSize):
-    pm = paraview.servermanager.vtkProcessModule.GetProcessModule()
-    globalController = pm.GetGlobalController()
-    if timeCompartmentSize > globalController.GetNumberOfProcesses():
-        timeCompartmentSize = globalController.GetNumberOfProcesses()
-
-    temporalController = CreateTimeCompartments(globalController, timeCompartmentSize)
-    return globalController, temporalController, timeCompartmentSize
-
-currentTimeStep = -1
-def UpdateCurrentTimeStep(globalController, timeCompartmentSize):
-    global currentTimeStep
-    if currentTimeStep == -1:
-        currentTimeStep = globalController.GetLocalProcessId() / timeCompartmentSize
-        return currentTimeStep
-
-    numTimeStepsPerIteration = globalController.GetNumberOfProcesses() / timeCompartmentSize
-    currentTimeStep = currentTimeStep + numTimeStepsPerIteration
-    return currentTimeStep
-
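-# Worked example of the round-robin above: with 8 processes and a time
-# compartment size of 4 there are two compartments, so compartment 0 handles
-# time steps 0, 2, 4, ... and compartment 1 handles 1, 3, 5, ..., each call
-# advancing by numTimeStepsPerIteration = 8/4 = 2.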
-def WriteImages(currentTimeStep, currentTime, views):
-    for view in views:
-        filename = view.tpFileName.replace("%t", str(currentTimeStep))
-        view.ViewTime = currentTime
-        WriteImage(filename, view, Magnification=view.tpMagnification)
-
-def WriteFiles(currentTimeStep, currentTime, writers):
-    for writer in writers:
-        originalfilename = writer.FileName
-        fname = originalfilename.replace("%t", str(currentTimeStep))
-        writer.FileName = fname
-        writer.UpdatePipeline(currentTime)
-        writer.FileName = originalfilename
-
-def IterateOverTimeSteps(globalController, timeCompartmentSize, timeSteps, writers, views):
-    currentTimeStep = UpdateCurrentTimeStep(globalController, timeCompartmentSize)
-    while currentTimeStep < len(timeSteps):
-        print globalController.GetLocalProcessId(), " is working on ", currentTimeStep
-        WriteImages(currentTimeStep, timeSteps[currentTimeStep], views)
-        WriteFiles(currentTimeStep, timeSteps[currentTimeStep], writers)
-        currentTimeStep = UpdateCurrentTimeStep(globalController, timeCompartmentSize)
-
-def CreateReader(ctor, args, fileInfo):
-    "Creates a reader, checks if it can be used, and sets the filenames"
-    reader = ctor()
-    CheckReader(reader)
-    import glob
-    files = glob.glob(fileInfo)
-    files.sort() # assume there is a logical ordering of the filenames that corresponds to time ordering
-    reader.FileName = files
-    for a in args:
-        s = "reader."+a
-        exec (s)
-
-    return reader
-
-def CreateWriter(ctor, filename, tp_writers):
-    writer = ctor()
-    writer.FileName = filename
-    tp_writers.append(writer)
-    return writer
-
-def CreateView(proxy_ctor, filename, magnification, width, height, tp_views):
-    view = proxy_ctor()
-    view.add_attribute("tpFileName", filename)
-    view.add_attribute("tpMagnification", magnification)
-    tp_views.append(view)
-    view.ViewSize = [width, height]
-    return view
-
-tp_writers = []
-tp_views = []
-# ==================== end of specialized temporal parallelism sections ==================
-
-timeCompartmentSize = 4
-globalController, temporalController, timeCompartmentSize = CreateControllers(timeCompartmentSize)
-
-RenderView1 = CreateView( CreateRenderView, "POP_TEMP_%t.png", 1, 549, 583, tp_views )
-RenderView1.LightSpecularColor = [1.0, 1.0, 1.0]
-RenderView1.InteractionMode = '3D'
-RenderView1.UseTexturedBackground = 0
-RenderView1.UseLight = 1
-RenderView1.CameraPosition = [24413625.828416377, -24592716.541236263, 5758186.884780747]
-RenderView1.FillLightKFRatio = 3.0
-RenderView1.Background2 = [0.0, 0.0, 0.165]
-RenderView1.FillLightAzimuth = -10.0
-RenderView1.LODResolution = 50.0
-RenderView1.BackgroundTexture = []
-RenderView1.KeyLightAzimuth = 10.0
-RenderView1.StencilCapable = 1
-RenderView1.LightIntensity = 1.0
-RenderView1.CameraFocalPoint = [1.78529588937719e-12, 1.4505529101189668e-12, 64147.750000000015]
-RenderView1.ImageReductionFactor = 2
-RenderView1.CameraViewAngle = 30.0
-RenderView1.CameraParallelScale = 30343845.664423227
-RenderView1.EyeAngle = 2.0
-RenderView1.HeadLightKHRatio = 3.0
-RenderView1.StereoRender = 0
-RenderView1.KeyLightIntensity = 0.75
-RenderView1.BackLightAzimuth = 110.0
-RenderView1.OrientationAxesInteractivity = 0
-RenderView1.UseInteractiveRenderingForSceenshots = 0
-RenderView1.UseOffscreenRendering = 0
-RenderView1.Background = [0.31999694819562063, 0.3400015259021897, 0.4299992370489052]
-RenderView1.UseOffscreenRenderingForScreenshots = 1
-RenderView1.NonInteractiveRenderDelay = 2
-RenderView1.CenterOfRotation = [0.0, 0.0, 64147.75]
-RenderView1.CameraParallelProjection = 0
-RenderView1.CompressorConfig = 'vtkSquirtCompressor 0 3'
-RenderView1.HeadLightWarmth = 0.5
-RenderView1.MaximumNumberOfPeels = 4
-RenderView1.LightDiffuseColor = [1.0, 1.0, 1.0]
-RenderView1.StereoType = 'Red-Blue'
-RenderView1.DepthPeeling = 1
-RenderView1.BackLightKBRatio = 3.5
-RenderView1.StereoCapableWindow = 1
-RenderView1.CameraViewUp = [0.0471859955443886, 0.2695389330828218, 0.9618327533293193]
-RenderView1.LightType = 'HeadLight'
-RenderView1.LightAmbientColor = [1.0, 1.0, 1.0]
-RenderView1.RemoteRenderThreshold = 3.0
-RenderView1.KeyLightElevation = 50.0
-RenderView1.CenterAxesVisibility = 0
-RenderView1.MaintainLuminance = 0
-RenderView1.StillRenderImageReductionFactor = 1
-RenderView1.BackLightWarmth = 0.5
-RenderView1.FillLightElevation = -75.0
-RenderView1.MultiSamples = 0
-RenderView1.FillLightWarmth = 0.4
-RenderView1.AlphaBitPlanes = 1
-RenderView1.LightSwitch = 0
-RenderView1.OrientationAxesVisibility = 0
-RenderView1.CameraClippingRange = [15039199.876017962, 60476974.08593859]
-RenderView1.BackLightElevation = 0.0
-RenderView1.ViewTime = 0.0
-RenderView1.OrientationAxesOutlineColor = [1.0, 1.0, 1.0]
-RenderView1.LODThreshold = 5.0
-RenderView1.CollectGeometryThreshold = 100.0
-RenderView1.UseGradientBackground = 0
-RenderView1.KeyLightWarmth = 0.6
-RenderView1.OrientationAxesLabelColor = [1.0, 1.0, 1.0]
-
-TEMP_t_t0_1_42l_oilspill12c_00060101_pop_nc = CreateReader( UnstructuredNetCDFPOPreader, ['Stride=[10, 10, 10]', 'VerticalVelocity=0', 'VOI=[0, -1, 0, -1, 0, -1]'], "/home/acbauer/DATA/UVCDAT/TEMP.t.t0.1_42l_oilspill12c.*.pop.nc" )
-timeSteps = GetActiveSource().TimestepValues if len(GetActiveSource().TimestepValues)!=0 else [0]
-a1_TEMP_PiecewiseFunction = CreatePiecewiseFunction( Points=[0.0, 0.0, 1.0, 1.0] )
-
-a1_TEMP_PVLookupTable = GetLookupTableForArray( "TEMP", 1, Discretize=1, RGBPoints=[-20.0, 0.23, 0.299, 0.754, 31.338409423828125, 0.706, 0.016, 0.15], UseLogScale=0, VectorComponent=0, NanColor=[0.25, 0.0, 0.0], NumberOfTableValues=256, ColorSpace='Diverging', VectorMode='Magnitude', HSVWrap=0, ScalarRangeInitialized=1.0, LockScalarRange=0 )
-
-DataRepresentation1 = Show()
-DataRepresentation1.CubeAxesZAxisVisibility = 1
-DataRepresentation1.SelectionPointLabelColor = [0.5, 0.5, 0.5]
-DataRepresentation1.SelectionPointFieldDataArrayName = 'vtkOriginalPointIds'
-DataRepresentation1.SuppressLOD = 0
-DataRepresentation1.CubeAxesXGridLines = 0
-DataRepresentation1.CubeAxesYAxisTickVisibility = 1
-DataRepresentation1.Position = [0.0, 0.0, 0.0]
-DataRepresentation1.BackfaceRepresentation = 'Follow Frontface'
-DataRepresentation1.SelectionOpacity = 1.0
-DataRepresentation1.SelectionPointLabelShadow = 0
-DataRepresentation1.CubeAxesYGridLines = 0
-DataRepresentation1.OrientationMode = 'Direction'
-DataRepresentation1.Source.TipResolution = 6
-DataRepresentation1.ScaleMode = 'No Data Scaling Off'
-DataRepresentation1.Diffuse = 1.0
-DataRepresentation1.SelectionUseOutline = 0
-DataRepresentation1.SelectionPointLabelFormat = ''
-DataRepresentation1.CubeAxesZTitle = 'Z-Axis'
-DataRepresentation1.Specular = 0.1
-DataRepresentation1.SelectionVisibility = 1
-DataRepresentation1.InterpolateScalarsBeforeMapping = 1
-DataRepresentation1.CubeAxesZAxisTickVisibility = 1
-DataRepresentation1.Origin = [0.0, 0.0, 0.0]
-DataRepresentation1.CubeAxesVisibility = 0
-DataRepresentation1.Scale = [1.0, 1.0, 1.0]
-DataRepresentation1.SelectionCellLabelJustification = 'Left'
-DataRepresentation1.DiffuseColor = [1.0, 1.0, 1.0]
-DataRepresentation1.SelectionCellLabelOpacity = 1.0
-DataRepresentation1.CubeAxesInertia = 1
-DataRepresentation1.Source = "Arrow"
-DataRepresentation1.Source.Invert = 0
-DataRepresentation1.Masking = 0
-DataRepresentation1.Opacity = 1.0
-DataRepresentation1.LineWidth = 1.0
-DataRepresentation1.MeshVisibility = 0
-DataRepresentation1.Visibility = 1
-DataRepresentation1.SelectionCellLabelFontSize = 18
-DataRepresentation1.CubeAxesCornerOffset = 0.0
-DataRepresentation1.SelectionPointLabelJustification = 'Left'
-DataRepresentation1.SelectionPointLabelVisibility = 0
-DataRepresentation1.SelectOrientationVectors = ''
-DataRepresentation1.CubeAxesTickLocation = 'Inside'
-DataRepresentation1.BackfaceDiffuseColor = [1.0, 1.0, 1.0]
-DataRepresentation1.CubeAxesYAxisVisibility = 1
-DataRepresentation1.SelectionPointLabelFontFamily = 'Arial'
-DataRepresentation1.Source.ShaftResolution = 6
-DataRepresentation1.CubeAxesFlyMode = 'Closest Triad'
-DataRepresentation1.SelectScaleArray = ''
-DataRepresentation1.CubeAxesYTitle = 'Y-Axis'
-DataRepresentation1.ColorAttributeType = 'POINT_DATA'
-DataRepresentation1.SpecularPower = 100.0
-DataRepresentation1.Texture = []
-DataRepresentation1.SelectionCellLabelShadow = 0
-DataRepresentation1.AmbientColor = [1.0, 1.0, 1.0]
-DataRepresentation1.MapScalars = 1
-DataRepresentation1.PointSize = 2.0
-DataRepresentation1.Source.TipLength = 0.35
-DataRepresentation1.SelectionCellLabelFormat = ''
-DataRepresentation1.Scaling = 0
-DataRepresentation1.StaticMode = 0
-DataRepresentation1.SelectionCellLabelColor = [0.0, 1.0, 0.0]
-DataRepresentation1.Source.TipRadius = 0.1
-DataRepresentation1.EdgeColor = [0.0, 0.0, 0.5000076295109483]
-DataRepresentation1.CubeAxesXAxisTickVisibility = 1
-DataRepresentation1.SelectionCellLabelVisibility = 0
-DataRepresentation1.NonlinearSubdivisionLevel = 1
-DataRepresentation1.CubeAxesColor = [1.0, 1.0, 1.0]
-DataRepresentation1.Representation = 'Surface'
-DataRepresentation1.CustomBounds = [0.0, 1.0, 0.0, 1.0, 0.0, 1.0]
-DataRepresentation1.CubeAxesXAxisMinorTickVisibility = 1
-DataRepresentation1.Orientation = [0.0, 0.0, 0.0]
-DataRepresentation1.CubeAxesXTitle = 'X-Axis'
-DataRepresentation1.ScalarOpacityUnitDistance = 313870.26193506655
-DataRepresentation1.BackfaceOpacity = 1.0
-DataRepresentation1.SelectionCellFieldDataArrayName = 'vtkOriginalCellIds'
-DataRepresentation1.SelectionColor = [1.0, 0.0, 1.0]
-DataRepresentation1.Ambient = 0.0
-DataRepresentation1.SelectionPointLabelFontSize = 18
-DataRepresentation1.ScaleFactor = 1.0
-DataRepresentation1.BackfaceAmbientColor = [1.0, 1.0, 1.0]
-DataRepresentation1.Source.ShaftRadius = 0.03
-DataRepresentation1.ScalarOpacityFunction = a1_TEMP_PiecewiseFunction
-DataRepresentation1.SelectMaskArray = ''
-DataRepresentation1.SelectionLineWidth = 2.0
-DataRepresentation1.CubeAxesZAxisMinorTickVisibility = 1
-DataRepresentation1.CubeAxesXAxisVisibility = 1
-DataRepresentation1.Interpolation = 'Gouraud'
-DataRepresentation1.SelectMapper = 'Projected tetra'
-DataRepresentation1.SelectionCellLabelFontFamily = 'Arial'
-DataRepresentation1.SelectionCellLabelItalic = 0
-DataRepresentation1.CubeAxesYAxisMinorTickVisibility = 1
-DataRepresentation1.CubeAxesZGridLines = 0
-DataRepresentation1.ExtractedBlockIndex = 0
-DataRepresentation1.SelectionPointLabelOpacity = 1.0
-DataRepresentation1.Pickable = 1
-DataRepresentation1.CustomBoundsActive = [0, 0, 0]
-DataRepresentation1.SelectionRepresentation = 'Wireframe'
-DataRepresentation1.SelectionPointLabelBold = 0
-DataRepresentation1.ColorArrayName = 'TEMP'
-DataRepresentation1.SelectionPointLabelItalic = 0
-DataRepresentation1.SpecularColor = [1.0, 1.0, 1.0]
-DataRepresentation1.LookupTable = a1_TEMP_PVLookupTable
-DataRepresentation1.SelectionPointSize = 5.0
-DataRepresentation1.SelectionCellLabelBold = 0
-DataRepresentation1.Orient = 0
-
-
-
-IterateOverTimeSteps(globalController, timeCompartmentSize, timeSteps, tp_writers, tp_views)
diff --git a/uvcdatspt/scripts/benchmark.py b/uvcdatspt/scripts/benchmark.py
deleted file mode 100644
index dca7f2ab87a8fba462bd7d3b742d1434714b0e35..0000000000000000000000000000000000000000
--- a/uvcdatspt/scripts/benchmark.py
+++ /dev/null
@@ -1,626 +0,0 @@
-"""
-This module has utilities to benchmark paraview.
-
-First, when run standalone, this will do a simple rendering benchmark test. The
-test renders a sphere with various rendering settings and reports the rendering
-rate achieved in triangles/sec. run() is the entrypoint for that usage.
-
-Second, you can set up arbitrary pipelines and this module helps you obtain,
-interpret and report the information recorded by ParaView's logs.
-Do that like so:
-1) optionally, call maximize logs first
-2) setup and run your visualization pipeline (via GUI or script as you prefer)
-3) either
-- call print_logs() to print out the logs in raw format
-or
-- call parse_logs() to let the script identify and report on per frame and per
-filter execution times
-
-WARNING: This was meant for server-side rendering, but it could work
-reasonably well when geometry is delivered to the client and rendered
-there, provided the script were changed to recognize MPIMoveData as the
-end of frame and to do something sensible on the server, which has no
-other end-of-frame knowledge.
-
-TODO: builtin mode shouldn't show server info, it is redundant
-TODO: this doesn't handle split render/data server mode
-TODO: the end of frame markers are heuristic, likely buggy, and have not
-been tried since before 3.9's view restructuring
-"""
-
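-# A minimal sketch of the second usage mode described above (assumes an
-# active ParaView session; the sphere stands in for any pipeline):
-#
-#   import benchmark
-#   benchmark.maximize_logs()
-#   Sphere(); Show(); Render()
-#   benchmark.parse_logs()   # or benchmark.print_logs() for the raw logs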
-import time
-import sys
-from paraview.simple import *
-
-try:
-    import numpy
-    numpy_loaded = True
-except ImportError:
-    numpy_loaded = False
-
-import re
-import paraview
-import copy
-import pickle
-
-# a regular expression to parse filter execution time
-match_filter = re.compile(" *Execute (\w+) id: +(\d+), +(\d*.*\d+) +seconds")
-match_vfilter = re.compile(" *Execute (\w+) +, +(\d*.*\d+) +seconds")
-
-# a regular expression to parse overall rendering time
-match_still_render = re.compile(" *(Still) Render, +(\d*.*\d+) +seconds")
-match_interactive_render = \
-re.compile(" *(Interactive) Render, +(\d*.*\d+) +seconds")
-match_render = re.compile(" *(\w+|\w+ Dev) Render, +(\d*.*\d+) +seconds")
-match_icetrender = re.compile("(IceT Dev) Render, +(\d*.*\d+) +seconds")
-
-# more for parallel composite and delivery time
-match_composite = re.compile(" *Compositing, +(\d*.*\d+) +seconds")
-match_send = re.compile(" *Sending, +(\d*.*\d+) +seconds")
-match_receive = re.compile(" *Receiving, +(\d*.*\d+) +seconds")
-
-match_comp_xmit = \
-re.compile(" *TreeComp (Send|Receive) (\d+) " + \
-           "(to|from) (\d+) uchar (\d+), +(\d*.*\d+) +seconds")
-match_comp_comp = re.compile(" *TreeComp composite, *(\d*.*\d+) +seconds")
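-# illustrative log lines these expressions are meant to match (ids and
-# timings made up):
-#   "      Execute vtkContourFilter id: 123,   0.0421 seconds"
-#   "    Still Render,   0.1337 seconds"
-#   "    Compositing,   0.0102 seconds"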
-
-showparse = False
-
-#icet composite message comes after the render messages,
-#where for bswap and manta it comes before so we have to treat icet differently
-icetquirk = False
-
-start_frame = 0
-default_log_threshold = dict()
-default_buffer_length = dict()
-
-class OneLog :
-    def __init__(self):
-        self.runmode = 'batch'
-        self.servertype = 'unified'
-        self.component = 0x10
-        self.rank = 0
-        self.lines = []
-
-    def componentString(self):
-        ret = ""
-        if self.component & 0x10:
-            ret = ret + " CLIENT "
-        if self.component & 0x4:
-            ret = ret + " RENDER "
-        if self.component & 0x1:
-            ret = ret + " DATA "
-        return ret
-
-    def print_log(self, showlines=False):
-        print "#RunMode:", self.runmode,
-        print "ServerType:", self.servertype,
-        print "Component:", self.componentString(),
-        print "processor#:", self.rank
-        if showlines:
-            for i in self.lines:
-                print i
-
-logs = []
-
-def maximize_logs () :
-    """
-    Convenience method to ask paraview to produce logs with lots of space and
-    highest resolution.
-    """
-    pm = paraview.servermanager.vtkProcessModule.GetProcessModule()
-    if pm == None:
-        return
-
-    # Not used here...
-    default_buffer_length[str(0x01)] = 1000000
-    default_buffer_length[str(0x04)] = 1000000
-    default_buffer_length[str(0x10)] = 1000000
-
-    default_log_threshold[str(0x01)] = 0.0
-    default_log_threshold[str(0x04)] = 0.0
-    default_log_threshold[str(0x10)] = 0.0
-
-
-def dump_logs( filename ) :
-    """
-    This saves off the logs we've gathered.
-    It allows you to run a benchmark somewhere, save off all of the details in
-    raw format, then load them somewhere else. You can then do a detailed
-    analysis and you always have the raw data to go back to.
-    """
-    global logs
-    f = open(filename, "w")
-    pickle.dump(logs, f)
-    f.close()
-
-def import_logs( filename ) :
-    """
-    This is for bringing in saved log files and parsing them after the fact.
-    TODO: add an option to load in raw paraview logs in text format
-    """
-    global logs
-    logs = []
-    f = open(filename, "r")
-    logs = pickle.load(f)
-    f.close()
-
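-# Round-trip sketch for the two helpers above ("run.pkl" is an arbitrary
-# file name):
-#   get_logs()
-#   dump_logs("run.pkl")     # on the machine that ran the benchmark
-#   import_logs("run.pkl")   # later, possibly somewhere else
-#   parse_logs()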
-def get_logs() :
-    """
-    This is for bringing in logs at run time to parse while running.
-    """
-    global logs
-    logs = []
-
-    pm = paraview.servermanager.vtkProcessModule.GetProcessModule()
-    if pm == None:
-        return
-
-    connectionId = paraview.servermanager.ActiveConnection.ID
-    session = paraview.servermanager.ActiveConnection.Session
-    pmOptions = pm.GetOptions()
-
-    """
-    vtkPVOptions::ProcessTypeEnum
-    PARAVIEW = 0x2,
-    PVCLIENT = 0x4,
-    PVSERVER = 0x8,
-    PVRENDER_SERVER = 0x10,
-    PVDATA_SERVER = 0x20,
-    PVBATCH = 0x40,
-    """
-    if pmOptions.GetProcessType() == 0x40:
-        runmode = 'batch'
-    else:
-        runmode = 'interactive'
-
-    """
-    vtkSMSession::RenderingMode
-    RENDERING_NOT_AVAILABLE = 0x00,
-    RENDERING_UNIFIED = 0x01,
-    RENDERING_SPLIT = 0x02
-    """
-    if session.GetRenderClientMode() == 0x01:
-        servertype = 'unified'
-    else:
-        servertype = 'split'
-
-    """
-    vtkProcessModule::SERVER_FLAGS
-    DATA_SERVER = 0x01,
-    DATA_SERVER_ROOT = 0x02,
-    RENDER_SERVER = 0x04,
-    RENDER_SERVER_ROOT = 0x08,
-    SERVERS = DATA_SERVER | RENDER_SERVER,
-    CLIENT = 0x10,
-    CLIENT_AND_SERVERS = DATA_SERVER | CLIENT | RENDER_SERVER
-    """
-    if runmode == 'batch':
-        components = [0x04]
-    else:
-        if servertype == 'unified':
-            components = [0x10, 0x04]
-        else:
-            components = [0x10, 0x04, 0x01]
-
-    for component in components:
-        timerInfo = paraview.servermanager.vtkPVTimerInformation()
-        if len(default_log_threshold) != 0:
-            timerInfo.SetLogThreshold(default_log_threshold[str(component)])
-        session.GatherInformation(component, timerInfo, 0)
-
-        for i in range(timerInfo.GetNumberOfLogs()):
-            alog = OneLog()
-            alog.runmode = runmode
-            alog.servertype = servertype
-            alog.component = component
-            alog.rank = i
-            for line in timerInfo.GetLog(i).split('\n'):
-                alog.lines.append(line)
-            logs.append(alog)
-
-def print_logs() :
-    global logs
-
-    if len(logs) == 0:
-        get_logs()
-
-    for i in logs:
-       i.print_log(True)
-
-def __process_frame() :
-    global filters
-    global current_frames_records
-    global frames
-    global start_frame
-
-    max = len(current_frames_records)
-
-    #determine ancestry of each record from order and indent
-    #subtract only immediate children from each record
-
-    #TODO: Make this an option
-    for x in xrange(max):
-        indent = current_frames_records[x]['indent']
-        minindent = 10000
-        for y in xrange(x+1,max):
-            indent2 = current_frames_records[y]['indent']
-            if indent2<=indent:
-                #found a record which is not a descendant
-                break
-            if indent2 < minindent:
-                minindent = indent2
-        for y in xrange(x+1,max):
-            indent2 = current_frames_records[y]['indent']
-            if indent2 == minindent:
-                current_frames_records[x]['local_duration'] = \
-                current_frames_records[x]['local_duration'] -\
-                current_frames_records[y]['duration']
-
-    for x in xrange(max):
-        #keep global statics per filter
-        record = current_frames_records[x]
-        id = record['id']
-        if id in filters:
-            srecord = filters[id]
-            srecord['duration'] = srecord['duration'] + record['duration']
-            srecord['local_duration'] = srecord['local_duration'] +\
-                                        record['local_duration']
-            srecord['count'] = srecord['count'] + 1
-            filters[id] = srecord
-        else:
-            filters[id] = copy.deepcopy(record)
-
-    #save off this frame and begin the next
-    frames.append(current_frames_records)
-    current_frames_records = []
-
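-# Worked example of the bookkeeping above, as (indent, duration) records:
-# a parent (0, 1.0) followed by children (2, 0.3) and (2, 0.5) ends up with
-# local_duration = 1.0 - 0.3 - 0.5 = 0.2; only records at the minimum child
-# indent, i.e. immediate children, are subtracted.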
-def __parse_line (line) :
-    """
-    Examine one line from the logs. If it is a report about a filter's
-    execution time, parse the relevant information out of the line and
-    collect those statistics. We record each filter's average execution
-    time as well as each filter's contribution to each rendered frame.
-    """
-    global filters
-    global current_frames_records
-    global cnt
-    global show_input
-    global icetquirk
-
-    found = False
-
-    #find indent
-    cnt = 0
-    for c in range(len(line)):
-        if line[c] == " ":
-            cnt = cnt + 1
-        else:
-            break
-
-    #determine if this log comes from icet so we can
-    #do special case treatement for frame markings
-    icetline = False
-    match = match_icetrender.match(line)
-    if match != None:
-        icetquirk = True
-        icetline = True
-
-    match = match_filter.match(line)
-    if match != None:
-        found = True
-        if showparse:
-            print "FILT:", cnt, line
-        name = match.group(1)
-        id = match.group(2)
-        duration = match.group(3)
-
-    match = match_vfilter.match(line)
-    if match != None:
-        found = True
-        if showparse:
-            print "LFLT:", cnt, line
-        name = match.group(1)
-        id = name
-        duration = match.group(2)
-
-    match = match_comp_comp.match(line)
-    if match != None:
-        found = True
-        if showparse:
-            print "TCMP:", cnt, line
-        name = "tree comp"
-        id = name
-        duration = match.group(1)
-
-    match = match_comp_xmit.match(line)
-    if match != None:
-        found = True
-        if showparse:
-            print "TXMT:", cnt, line
-        name = match.group(1)
-        id = name
-        duration = match.group(6)
-
-    match = match_composite.match(line)
-    if match != None:
-        found = True
-        if showparse:
-            print "COMP:", cnt, line
-        name = 'composite'
-        id = 'comp'
-        duration = match.group(1)
-
-    match = match_send.match(line)
-    if match != None:
-        found = True
-        if showparse:
-            print "SEND:", cnt, line
-        name = 'send'
-        id = 'send'
-        duration = match.group(1)
-
-    match = match_receive.match(line)
-    if match != None:
-        found = True
-        if showparse:
-            print "RECV:", cnt, line
-        name = 'receive'
-        id = 'recv'
-        duration = match.group(1)
-
-    match = match_still_render.match(line)
-    if match != None:
-        found = True
-        if showparse:
-            print "STILL:", cnt, line
-        name = match.group(1)
-        id = 'still'
-        duration = match.group(2)
-
-    if match == None:
-        match = match_interactive_render.match(line)
-        if match != None:
-            found = True
-            if showparse:
-                print "INTER:", cnt, line
-            name = match.group(1)
-            id = 'inter'
-            duration = match.group(2)
-
-    if match == None:
-        match = match_render.match(line)
-        if match != None:
-            found = True
-            if showparse:
-                print "REND:", cnt, line
-            name = match.group(1)
-            id = 'render'
-            duration = match.group(2)
-
-    if found == False:
-        # we didn't find anything we recognized in this line, ignore it
-        if showparse:
-            print "????:", cnt, line
-        return
-
-    record = dict()
-    record['id'] = id
-    record['name'] = name
-    record['duration'] = float(duration)
-    record['local_duration'] = float(duration)
-    record['count'] = 1
-    record['indent'] = cnt
-
-    #watch for the beginning of the next frame/end of previous frame
-    if cnt == 0:
-        if (id == 'still') or \
-           (id == 'inter') or \
-           (icetquirk == False and id == 'comp') or \
-           (icetquirk == True and icetline == True) :
-            if showparse:
-                print "SOF" #start of frame
-            #decipher parent child information from records in the frame
-            #and save off newly gathered per filter and per frame statistics
-            __process_frame()
-
-    #keep a record of this execution as part for the current frame
-    current_frames_records.append(record)
-
-    return
-
-def parse_logs(show_parse = False, tabular = False) :
-    """
-    Parse the collected paraview log information.
-    This prints out per frame, and aggregated per filter statistics.
-
-    If show_parse is true, debugging information is shown about the parsing
-    process that allows you to verify that the derived stats are correct.
-    This includes an echo of each log line collected, prepended by the
-    token type and indent scanned in, or ???? if the line is unrecognized
-    and ignored. Frame boundaries are denoted by SOF, indicating the preceding
-    line was determined to be the start of the next frame.
-    """
-
-    global filters
-    global current_frames_records
-    global frames
-    global cnt
-    global showparse
-    global start_frame
-
-    showparse = show_parse
-
-    if len(logs) == 0:
-        get_logs()
-
-    for i in logs:
-        # per filter records
-        filters = dict()
-        filters.clear()
-        # per frame records
-        frames = []
-        # components of current frame
-        current_frames_records = []
-        cnt = 0
-
-        runmode = i.runmode
-        servertype = i.servertype
-        component = i.component
-        rank = i.rank
-        i.print_log(False)
-
-        for line in i.lines:
-            __parse_line(line)
-
-        #collect stats for the current frame, which is in progress but not officially ended
-        __process_frame()
-
-        #print out the gathered per frame information
-        if tabular:
-            frecs = dict()
-            line = "#framenum, "
-            for x in filters:
-                line += filters[x]['name'] + ":" + filters[x]['id']  + ", "
-            #print line
-            for cnt in xrange(start_frame, len(frames)):
-                line = ""
-                line += str(cnt) + ", "
-                printed = dict()
-                for x in filters:
-                    id = filters[x]['id']
-                    name = filters[x]['name']
-                    found = False
-                    for record in frames[cnt]:
-                        if 'id' in record:
-                            if record['id'] == id and \
-                            record['name'] == name and \
-                            not id in printed:
-                                found = True
-                                printed[id] = 1
-                                line += str(record['local_duration']) + ", "
-                                if not id in frecs:
-                                    frecs[id] = []
-                                frecs[id].append(record['local_duration'])
-                    if not found:
-                        line += "0, "
-                #print line
-            #print
-            for x in frecs.keys():
-                v = frecs[x]
-                print "# ", x, len(v),
-                if numpy_loaded:
-                    print numpy.min(v), numpy.mean(v), numpy.max(v),
-                    print numpy.std(v)
-        else:
-            print "#FRAME TIMINGS"
-            print "#filter id, filter type, inclusive duration, local duration"
-            for cnt in xrange(start_frame, len(frames)):
-                print "#Frame ", cnt
-                for record in frames[cnt]:
-                    if 'id' in record:
-                        print record['id'], ",",
-                        print record['name'], ",",
-                        print record['duration'], ",",
-                        print record['local_duration']
-        #print
-        #print
-
-        if not tabular:
-            #print out the gathered per filter information
-            print "#FILTER TIMINGS"
-            print "#filter id, filter type, count, "+\
-                  "sum inclusive duration, sum local duration"
-            for x in filters:
-                record = filters[x]
-                print record['id'], ",",
-                print record['name'], ",",
-                print record['count'], ",",
-                print record['duration'], ",",
-                print record['local_duration']
-            print
-
-def __render(ss, v, title, nframes):
-    print '============================================================'
-    print title
-    res = []
-    res.append(title)
-    for phires in (500, 1000):
-        ss.PhiResolution = phires
-        c = v.GetActiveCamera()
-        v.CameraPosition = [-3, 0, 0]
-        v.CameraFocalPoint = [0, 0, 0]
-        v.CameraViewUp = [0, 0, 1]
-        Render()
-        c1 = time.time()
-        for i in range(nframes):
-            c.Elevation(0.5)
-            Render()
-            if not servermanager.fromGUI:
-                sys.stdout.write(".")
-                sys.stdout.flush()
-        if not servermanager.fromGUI:
-            sys.stdout.write("\n")
-        tpr = (time.time() - c1)/nframes
-        ncells = ss.GetDataInformation().GetNumberOfCells()
-        print tpr, " secs/frame"
-        print ncells, " polys"
-        print ncells/tpr, " polys/sec"
-
-        res.append((ncells, ncells/tpr))
-    return res
-
-def run(filename=None, nframes=60):
-    """ Runs the benchmark. If a filename is specified, it will write the
-    results to that file as csv. The number of frames controls how many times
-    a particular configuration is rendered. Higher numbers lead to more accurate
-    averages. """
-    # Turn off progress printing
-    paraview.servermanager.SetProgressPrintingEnabled(0)
-
-    # Create a sphere source to use in the benchmarks
-    ss = Sphere(ThetaResolution=1000, PhiResolution=500)
-    rep = Show()
-    v = Render()
-    results = []
-
-    # Start with these defaults
-    #v.RemoteRenderThreshold = 0
-    obj = servermanager.misc.GlobalMapperProperties()
-    obj.GlobalImmediateModeRendering = 0
-
-    # Test different configurations
-    title = 'display lists, no triangle strips, solid color'
-    obj.GlobalImmediateModeRendering = 0
-    results.append(__render(ss, v, title, nframes))
-
-    title = 'no display lists, no triangle strips, solid color'
-    obj.GlobalImmediateModeRendering = 1
-    results.append(__render(ss, v, title, nframes))
-
-    # Color by normals
-    lt = servermanager.rendering.PVLookupTable()
-    rep.LookupTable = lt
-    rep.ColorAttributeType = 0 # point data
-    rep.ColorArrayName = "Normals"
-    lt.RGBPoints = [-1, 0, 0, 1, 0.0288, 1, 0, 0]
-    lt.ColorSpace = 'HSV'
-    lt.VectorComponent = 0
-
-    title = 'display lists, no triangle strips, color by array'
-    obj.GlobalImmediateModeRendering = 0
-    results.append(__render(ss, v, title, nframes))
-
-    title = 'no display lists, no triangle strips, color by array'
-    obj.GlobalImmediateModeRendering = 1
-    results.append(__render(ss, v, title, nframes))
-
-    if filename:
-        f = open(filename, "w")
-    else:
-        f = sys.stdout
-    print >>f, 'configuration, %d, %d' % (results[0][1][0], results[0][2][0])
-    for i in results:
-        print >>f, '"%s", %g, %g' % (i[0], i[1][1], i[2][1])
-
-if __name__ == "__main__":
-    run()
diff --git a/uvcdatspt/scripts/ocean.py b/uvcdatspt/scripts/ocean.py
deleted file mode 100644
index 932d4e2deadd4d2e788d6e649d258cad85fcc0e0..0000000000000000000000000000000000000000
--- a/uvcdatspt/scripts/ocean.py
+++ /dev/null
@@ -1,187 +0,0 @@
-
-try: paraview.simple
-except: from paraview.simple import *
-
-import sys
-import os
-import paraview
-
-import benchmark
-
-# trying to import the library where I can specify the global and subcontrollers
-try:
-    import libvtkParallelPython as vtkParallel # requires LD_LIBRARY_PATH being properly set
-except ImportError:
-    import vtkParallelPython as vtkParallel # for a static build, i.e. jaguarpf, use this instead and don't worry about LD_LIBRARY_PATH
-
-
-# global variables
-timeCompartmentSize = 4
-input_files = "/home/boonth/Desktop/spatio/ocean_4/SALT*"
-iso_files = "/home/boonth/Desktop/spatio/ocean/salt_%t.pvtp"
-
-currentTimeStep = -1
-log_lines_per_file = 5
-
-
-# some initial setup
-benchmark.maximize_logs()
-
-pm = paraview.servermanager.vtkProcessModule.GetProcessModule()
-timer = paraview.vtk.vtkTimerLog()
-if len(sys.argv) < 2:
-    print 'usage: %s <num files>' % sys.argv[0]
-    sys.exit(1)
-num_files = int(sys.argv[1])
-numprocs = pm.GetGlobalController().GetNumberOfProcesses()
-timer.SetMaxEntries(log_lines_per_file * num_files * numprocs + 2)
-pm.GetGlobalController().Barrier()
-timer.StartTimer()
-
-paraview.options.batch = True # this may not be necessary
-paraview.simple._DisableFirstRenderCameraReset()
-
-def CreateTimeCompartments(globalController, timeCompartmentSize):
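-    "Partitions the global controller into groups of timeCompartmentSize processes and installs each group's sub-controller as the active global controller"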
-    if globalController.GetNumberOfProcesses() == 1:
-        print 'single process'
-        return
-    elif globalController.GetNumberOfProcesses() % timeCompartmentSize != 0:
-        print 'number of processes must be an integer multiple of time compartment size'
-        return
-    elif timeCompartmentSize == globalController.GetNumberOfProcesses():
-        return globalController
-
-    gid = globalController.GetLocalProcessId()
-    timeCompartmentGroupId = int(gid / timeCompartmentSize)
-    newController = globalController.PartitionController(timeCompartmentGroupId, gid % timeCompartmentSize)
-    # must unregister if the reference count is greater than 1
-    if newController.GetReferenceCount() > 1:
-        newController.UnRegister(None)
-
-    #print gid, timeCompartmentGroupId, gid % timeCompartmentSize
-    print gid, ' of global comm is ', newController.GetLocalProcessId()
-    globalController.SetGlobalController(newController)
-    return newController
-
-def CheckReader(reader):
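-    "Exits if the reader does not expose a FileName property and time step information"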
-    if hasattr(reader, "FileName") == False:
-        print "ERROR: Don't know how to set file name for ", reader.SMProxy.GetXMLName()
-        sys.exit(-1)
-
-    if hasattr(reader, "TimestepValues") == False:
-        print "ERROR: ", reader.SMProxy.GetXMLName(), " doesn't have time information"
-        sys.exit(-1)
-
-def CreateControllers(timeCompartmentSize):
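-    "Clamps timeCompartmentSize to the process count and builds the time-compartment controllers"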
-    pm = paraview.servermanager.vtkProcessModule.GetProcessModule()
-    globalController = pm.GetGlobalController()
-    if timeCompartmentSize > globalController.GetNumberOfProcesses():
-        timeCompartmentSize = globalController.GetNumberOfProcesses()
-
-    temporalController = CreateTimeCompartments(globalController, timeCompartmentSize)
-    return globalController, temporalController, timeCompartmentSize
-
-def UpdateCurrentTimeStep(globalController, timeCompartmentSize):
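-    "Returns the next time step for this compartment; steps are dealt round-robin across the compartments"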
-    global currentTimeStep
-    if currentTimeStep == -1:
-        currentTimeStep = globalController.GetLocalProcessId() / timeCompartmentSize
-        return currentTimeStep
-
-    numTimeStepsPerIteration = globalController.GetNumberOfProcesses() / timeCompartmentSize
-    currentTimeStep = currentTimeStep + numTimeStepsPerIteration
-    return currentTimeStep
-
-def WriteImages(currentTimeStep, currentTime, views):
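-    "Writes a screenshot for each view, substituting %t in the file name with the time step"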
-    for view in views:
-        filename = view.tpFileName.replace("%t", str(currentTimeStep))
-        view.ViewTime = currentTime
-        WriteImage(filename, view, Magnification=view.tpMagnification)
-
-def WriteFiles(currentTimeStep, currentTime, writers, reader, contour):
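-    "Updates each writer for the current time, substituting %t in the file name with the time step"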
-    for writer in writers:
-        originalfilename = writer.FileName
-        fname = originalfilename.replace("%t", str(currentTimeStep))
-        #folder = (currentTimeStep % 3) + 1
-        #fname = originalfilename % (folder, currentTimeStep)
-        writer.FileName = fname
-        writer.UpdatePipeline(currentTime)
-        writer.FileName = originalfilename
-
-def IterateOverTimeSteps(globalController, timeCompartmentSize, timeSteps, writers, views, reader, contour):
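-    "Loops until this compartment has processed all of its assigned time steps"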
-    currentTimeStep = UpdateCurrentTimeStep(globalController, timeCompartmentSize)
-    while currentTimeStep < len(timeSteps):
-        print globalController.GetLocalProcessId(), " is working on ", currentTimeStep
-        sys.stdout.flush()
-        WriteImages(currentTimeStep, timeSteps[currentTimeStep], views)
-        WriteFiles(currentTimeStep, timeSteps[currentTimeStep], writers, reader, contour)
-        currentTimeStep = UpdateCurrentTimeStep(globalController, timeCompartmentSize)
-
-def CreateReader(ctor, args, fileInfo):
-    "Creates a reader, checks if it can be used, and sets the filenames"
-    reader = ctor()
-    CheckReader(reader)
-    import glob
-    files = glob.glob(fileInfo)
-    files.sort() # assume there is a logical ordering of the filenames that corresponds to time ordering
-    reader.FileName = files
-    for a in args:
-        s = "reader."+a
-        exec (s)
-
-    return reader
-
-def CreateWriter(ctor, filename, tp_writers):
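-    "Creates a writer proxy with the given file name template and registers it for the time loop"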
-    writer = ctor()
-    writer.FileName = filename
-    tp_writers.append(writer)
-    return writer
-
-def CreateView(proxy_ctor, filename, magnification, width, height, tp_views):
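-    "Creates a view proxy, attaches the file name template and magnification, and registers it for the time loop"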
-    view = proxy_ctor()
-    view.add_attribute("tpFileName", filename)
-    view.add_attribute("tpMagnification", magnification)
-    tp_views.append(view)
-    view.ViewSize = [width, height]
-    return view
-
-def main():
-
-    global timer
-    global timeCompartmentSize
-
-    tp_writers = []
-    tp_views = []
-
-    # ============ end of specialized temporal parallelism sections ==========
-
-    globalController, temporalController, timeCompartmentSize = CreateControllers(timeCompartmentSize)
-
-    reader = CreateReader( NetCDFReader, ["Dimensions='(depth_t, t_lat, t_lon)'", 'ReplaceFillValueWithNan=0', 'VerticalBias=0.0', "OutputType='Automatic'", 'SphericalCoordinates=1', 'VerticalScale=1.0'], input_files )
-    timeSteps = GetActiveSource().TimestepValues
-    if len(timeSteps) == 0:
-        timeSteps = [0.0]
-    contour = Contour( guiName="contour", Isosurfaces=[0.03], ComputeNormals=1, ComputeGradients=0, ComputeScalars=0, ContourBy=['POINTS', 'SALT'], PointMergeMethod="Uniform Binning" )
-    contour.PointMergeMethod.Numberofpointsperbucket = 8
-    contour.PointMergeMethod.Divisions = [50, 50, 50]
-
-    ParallelPolyDataWriter2 = CreateWriter(XMLPPolyDataWriter, iso_files, tp_writers)
-
-    IterateOverTimeSteps(globalController, timeCompartmentSize, timeSteps, tp_writers, tp_views, reader, contour)
-
-    globalController.Barrier()
-    timer.StopTimer()
-
-    gid = globalController.GetLocalProcessId()
-    if gid == 0:
-        print 'all done! -- total time is', timer.GetElapsedTime(), 'seconds'
-
-    benchmark.get_logs()
-    if gid == 0:
-        benchmark.print_logs()
-
-if __name__ == '__main__':
-    if len(sys.argv) < 2:
-        print 'usage: %s <num files>' % sys.argv[0]
-    else:
-        main()
-