Commit 239b8126 authored by Burlen Loring's avatar Burlen Loring
Browse files

Merge branch 'request_specific_meshes' into 'master'

Request specific meshes

See merge request !55
parents 9417b0dc f56fe0f9
......@@ -15,7 +15,7 @@ endif()
set(tmp "-fPIC -std=c++11 -Wall -Wextra")
set(tmp "-fPIC -std=c++11 -Wall -Wextra -fpermissive")
set(tmp "${tmp} -static -static-libgcc -static-libstdc++ -pthread -Wl,-Bstatic")
......@@ -64,7 +64,7 @@ if (ENABLE_PYTHON)
file(STRINGS ${depend_file} depends)
OUTPUT ${output_file}
COMMAND ${swig_cmd} -c++ -python -threads -w341,325,401,504
COMMAND ${swig_cmd} -c++ -python -w341,325,401,504
......@@ -2,9 +2,9 @@
set enabled="1" on analyses you wish to enable -->
<!-- Custom Analyses -->
<analysis type="histogram" array="pressure" association="cell" bins="10" enabled="1" />
<analysis type="histogram" array="density" association="cell" bins="10" enabled="1" />
<analysis type="histogram" array="temperature" association="cell" bins="10" enabled="1" />
<analysis type="histogram" mesh="mesh" array="pressure" association="cell" bins="10" enabled="1" />
<analysis type="histogram" mesh="mesh" array="density" association="cell" bins="10" enabled="1" />
<analysis type="histogram" mesh="mesh" array="temperature" association="cell" bins="10" enabled="1" />
<!-- ADIOS Analyses -->
<analysis type="adios" filename="3D_Grid.bp" method="MPI" enabled ="0"/>
<!-- Custom Analyses -->
<analysis type="histogram" array="v" association="point"
bins="10" enabled="1" />
<analysis type="histogram" mesh="bodies" array="v" association="point"
bins="10" enabled="0" />
<!-- Available with ENABLE_VTK_XMLP -->
<analysis type="PosthocIO" array="vx" association="point"
enabled="0" period="1" mode="vtkXmlP" output_dir="./" />
<analysis type="PosthocIO" mode="paraview" output_dir="./" enabled="0">
<mesh name="bodies">
<point_arrays> ids, m, v, f </point_arrays>
<!-- Available with ENABLE_CATALYST -->
<analysis type="catalyst" pipeline="pythonscript"
filename="" enabled="0" />
filename="../sensei/miniapps/newton/" enabled="1" />
<!-- Available with ENABLE_LIBSIM -->
<analysis type="libsim" plots="Pseudocolor" plotvars="ids"
......@@ -2,13 +2,17 @@
set enabled="1" on analyses you wish to enable -->
<!-- Custom Analyses-->
<analysis type="PosthocIO" array="data" association="cell"
enabled="0" period="1" mode="vtkXmlP" output_dir="./" />
<analysis type="PosthocIO"
output_dir="./" file_name="output" mode="visit" enabled="1">
<mesh name="mesh">
<cell_arrays> data </cell_arrays>
<analysis type="histogram" array="data" association="cell"
<analysis type="histogram" mesh="mesh" array="data" association="cell"
bins="10" enabled="1" />
<analysis type="autocorrelation" array="data" association="cell" window="10"
<analysis type="autocorrelation" mesh="mesh" array="data" association="cell" window="10"
k-max="3" enabled="1" />
<!-- VTK-m Analyses -->
# Overview
VTK has two ways of representing parallel distributed data. In the first each
MPI rank has a single object derived from
[vtkDataSet]( We
will call this the legacy approach. In the second case each MPI rank has a
single object derived from
which contains any number of local and remote datasets. We will call this the
composite approach. We treat the legacy approach as a special case of the
composite approach. Use of a data object type enumeration at the top level
enables differentiation between the two cases.
Each VTK object that can be serialized has a corresponding serializer derived
from [senseiADIOS::Schema](#senseiadiosschema). Serializers are expected to serialize data and metadata
needed to represent the object, or pass the object to lower level serializers
to accomplish this task. As composite data objects are traversed each high
level serializer class is given the chance to serialize each leaf dataset. When
they are passed a dataset that they can't handle they will ignore it.
At the highest level we provide serialization of collections of VTK data objects,
each of which can be a composite data object containing any number of datasets or
nested composite objects. Unique id's are given to each object in the collection,
and also each dataset in the object. Top level data objects are given a unique id,
called a `doid`, while nested datasets make use of the so called flat index provided by
VTK, called a `dsid`. For example the first dataset in the second object is identified by
the path:
The remainder of this document details each serializer, and what it writes to
the ADIOS file/stream.
# senseiADIOS::Schema
This is the base class defining the API to serialize/deserialize collections of
VTK data objects using ADIOS. The [senseiADIOS::Schema](#senseiadiosschema) class declares API to
accomplish steps involved in writing/reading data with ADIOS. A common theme
when dealing with parallel distributed VTK datasets is traversing the composite
objects and operating on leaf datasets. Thus the class provides default
implementation for the traversal of composite datasets leaving derived classes
to implement an override to process leaf datasets.
# senseiADIOS::DataObjectCollectionSchema
This class serializes/deserializes collections of
and global metadata such as object names, time, time step, and schema version.
Each object in the collection is serialized by
### writes/reads
path | description
--- | ---
`SENSEIDataObjectSchema` | schema revision, unsigned int
`time` | current simulation time, double
`time_step` | current simulation step, unsigned long long
`number_of_data_objects` | number of objects serialized, integer
`data_object_<doid>/name` | the name of each object, string
# senseiADIOS::DataObjectSchema
This class serializes/deserializes metadata for
and passes the data object off to the
([senseiADIOS::DatasetSchema](#senseiadiosdatasetschema)) for serialization of leaf
### writes/reads
path | description
--- | ---
`data_object_<doid>/number_of_datasets` | number of leaves in composite dataset, integer
`data_object_<doid>/data_object_type` | VTK data object type enumeration, integer
# senseiADIOS::DatasetSchema
This class serializes/deserializes metadata for
[vtkDataSet]( and
manages lower level specialized serialization objects
[senseiADIOS::Extent3DSchema](#senseiadiosextent3dschema)) that serialize/deserialize VTK
datasets derived from vtkDataSet.
### writes/reads
path | description
--- | ---
`data_object_<doid>/dataset_<dsid>/data_object_type` | VTK dataset type enumeration, integer
# senseiADIOS::Extent3DSchema
This class serializes/deserializes metadata needed to represent geometry of uniform
Cartesian meshes, in VTK the
### writes/reads
path | description
--- | ---
`data_object_<doid>/dataset_<dsid>/extent` | index space extents, 6 integers
`data_object_<doid>/dataset_<dsid>/origin` | coordinate system origin, 3 doubles
`data_object_<doid>/dataset_<dsid>/spacing` | grid spacing, 3 doubles
# senseiADIOS::PointsSchema
This class serializes/deserializes coordinates of unstructured meshes derived
from VTK's
### writes/reads
path | description
--- | ---
`data_object_<doid>/dataset_<dsid>/points/number_of_elements` | length of the array, unsigned long
`data_object_<doid>/dataset_<dsid>/points/elem_type` | VTK data type enumeration, integer
`data_object_<doid>/dataset_<dsid>/points/data` | the array values
# senseiADIOS::CellsSchema
This class serializes/deserializes mesh topology for unstructured meshes of VTK's
and [vtkPolyData](
### writes/reads
path | description
--- | ---
`data_object_<doid>/dataset_<dsid>/cells/number_of_cells` | number of cells, unsigned long
`data_object_<doid>/dataset_<dsid>/cells/cell_types` | array of VTK cell type enumeration
`data_object_<doid>/dataset_<dsid>/cells/number_of_elements` | length of the cell array
`data_object_<doid>/dataset_<dsid>/cells/data` | the cell array values
# senseiADIOS::DatasetAttributesSchema
This class serializes/deserializes
the containers for cell and point centered data arrays and the contained data
arrays. It is templated on attribute enumeration `att_t` which is used as a tag
in the schema. `att_str` is a string representation of `att_t`. For convenience
we define the following typedefs:
// specializations for common use cases
using PointDataSchema = DatasetAttributesSchema<vtkDataObject::POINT>;
using CellDataSchema = DatasetAttributesSchema<vtkDataObject::CELL>;
### writes/reads
path | description
--- | ---
`data_object_<doid>/dataset_<dsid>/<att_str>/number_of_arrays` | number of arrays, integer
`data_object_<doid>/dataset_<dsid>/<att_str>/array_<i>/name` | name of the array, string
`data_object_<doid>/dataset_<dsid>/<att_str>/array_<i>/number_of_elements` | length of the array, unsigned long
`data_object_<doid>/dataset_<dsid>/<att_str>/array_<i>/number_of_components` | number of components, integer
`data_object_<doid>/dataset_<dsid>/<att_str>/array_<i>/element_type` | VTK data type enumeration, integer
`data_object_<doid>/dataset_<dsid>/<att_str>/array_<i>/data` | the array values
# Examples
## Aggregate data
This example shows the file structure of a collection comprised of a 2 block
multi-block uniform Cartesian mesh and a 2 block multi-block unstructured mesh
over 3 time steps. Each mesh contains 1 single precision cell data and 1 single
precision point data array. The code generating and writing the data,
`` is capable of writing BP files or streaming over FLEXPATH
and is part of the regression test suite distributed with the source code. Its
counterpart `` can be used to deserialize the file/stream.
$mpiexec -np 2 python ../sensei/sensei/testing/ test.bp MPI 3
STATUS[0] : initializing the VTKDataAdaptor step 0 time 0.0
STATUS[0] : executing ADIOSAnalysisAdaptor MPI step 0 time 0.0
WARNING: [0][/home/sensei/sc17/software/sensei/builds/sensei/sensei/ADIOSAnalysisAdaptor.cxx:82][v1.1.0]
WARNING: No subset specified. Writing all available data
STATUS[0] : finished writing 1 steps
The `bpls` tool that ships with ADIOS can be used to display the file structure
and dump arrays.
$bpls test.bp
unsigned long long time_step 3*scalar
double time 3*scalar
integer number_of_data_objects 3*scalar
integer data_object_0/name_len 3*scalar
byte data_object_0/name 3*{6}
unsigned integer data_object_0/number_of_datasets 3*scalar
integer data_object_0/data_object_type 3*scalar
integer data_object_0/dataset_1/data_object_type 3*scalar
integer data_object_0/dataset_1/extent_len 3*scalar
integer data_object_0/dataset_1/extent 3*{6}
integer data_object_0/dataset_1/origin_len 3*scalar
double data_object_0/dataset_1/origin 3*{3}
integer data_object_0/dataset_1/spacing_len 3*scalar
double data_object_0/dataset_1/spacing 3*{3}
integer data_object_0/dataset_1/point_data/number_of_arrays 3*scalar
integer data_object_0/dataset_1/point_data/array_0/name_len 3*scalar
byte data_object_0/dataset_1/point_data/array_0/name 3*{11}
long long data_object_0/dataset_1/point_data/array_0/number_of_elements 3*scalar
integer data_object_0/dataset_1/point_data/array_0/number_of_components 3*scalar
integer data_object_0/dataset_1/point_data/array_0/element_type 3*scalar
real data_object_0/dataset_1/point_data/array_0/data 3*{108}
integer data_object_0/dataset_1/cell_data/number_of_arrays 3*scalar
integer data_object_0/dataset_1/cell_data/array_0/name_len 3*scalar
byte data_object_0/dataset_1/cell_data/array_0/name 3*{11}
long long data_object_0/dataset_1/cell_data/array_0/number_of_elements 3*scalar
integer data_object_0/dataset_1/cell_data/array_0/number_of_components 3*scalar
integer data_object_0/dataset_1/cell_data/array_0/element_type 3*scalar
real data_object_0/dataset_1/cell_data/array_0/data 3*{34}
integer data_object_1/name_len 3*scalar
byte data_object_1/name 3*{13}
unsigned integer data_object_1/number_of_datasets 3*scalar
integer data_object_1/data_object_type 3*scalar
integer data_object_1/dataset_1/data_object_type 3*scalar
unsigned long long data_object_1/dataset_1/cells/number_of_cells 3*scalar
unsigned byte data_object_1/dataset_1/cells/cell_types 3*{16}
unsigned long long data_object_1/dataset_1/cells/number_of_elements 3*scalar
long long data_object_1/dataset_1/cells/data 3*{32}
unsigned long long data_object_1/dataset_1/points/number_of_elements 3*scalar
integer data_object_1/dataset_1/points/elem_type 3*scalar
real data_object_1/dataset_1/points/data 3*{48}
integer data_object_1/dataset_1/point_data/number_of_arrays 3*scalar
integer data_object_1/dataset_1/point_data/array_0/name_len 3*scalar
byte data_object_1/dataset_1/point_data/array_0/name 3*{11}
long long data_object_1/dataset_1/point_data/array_0/number_of_elements 3*scalar
integer data_object_1/dataset_1/point_data/array_0/number_of_components 3*scalar
integer data_object_1/dataset_1/point_data/array_0/element_type 3*scalar
real data_object_1/dataset_1/point_data/array_0/data 3*{16}
integer data_object_1/dataset_1/cell_data/number_of_arrays 3*scalar
integer data_object_1/dataset_1/cell_data/array_0/name_len 3*scalar
byte data_object_1/dataset_1/cell_data/array_0/name 3*{11}
long long data_object_1/dataset_1/cell_data/array_0/number_of_elements 3*scalar
integer data_object_1/dataset_1/cell_data/array_0/number_of_components 3*scalar
integer data_object_1/dataset_1/cell_data/array_0/element_type 3*scalar
real data_object_1/dataset_1/cell_data/array_0/data 3*{16}
integer data_object_0/dataset_2/data_object_type 3*scalar
integer data_object_0/dataset_2/extent_len 3*scalar
integer data_object_0/dataset_2/extent 3*{6}
integer data_object_0/dataset_2/origin_len 3*scalar
double data_object_0/dataset_2/origin 3*{3}
integer data_object_0/dataset_2/spacing_len 3*scalar
double data_object_0/dataset_2/spacing 3*{3}
integer data_object_0/dataset_2/point_data/number_of_arrays 3*scalar
integer data_object_0/dataset_2/point_data/array_0/name_len 3*scalar
byte data_object_0/dataset_2/point_data/array_0/name 3*{11}
long long data_object_0/dataset_2/point_data/array_0/number_of_elements 3*scalar
integer data_object_0/dataset_2/point_data/array_0/number_of_components 3*scalar
integer data_object_0/dataset_2/point_data/array_0/element_type 3*scalar
real data_object_0/dataset_2/point_data/array_0/data 3*{108}
integer data_object_0/dataset_2/cell_data/number_of_arrays 3*scalar
integer data_object_0/dataset_2/cell_data/array_0/name_len 3*scalar
byte data_object_0/dataset_2/cell_data/array_0/name 3*{11}
long long data_object_0/dataset_2/cell_data/array_0/number_of_elements 3*scalar
integer data_object_0/dataset_2/cell_data/array_0/number_of_components 3*scalar
integer data_object_0/dataset_2/cell_data/array_0/element_type 3*scalar
real data_object_0/dataset_2/cell_data/array_0/data 3*{34}
integer data_object_1/dataset_2/data_object_type 3*scalar
unsigned long long data_object_1/dataset_2/cells/number_of_cells 3*scalar
unsigned byte data_object_1/dataset_2/cells/cell_types 3*{16}
unsigned long long data_object_1/dataset_2/cells/number_of_elements 3*scalar
long long data_object_1/dataset_2/cells/data 3*{32}
unsigned long long data_object_1/dataset_2/points/number_of_elements 3*scalar
integer data_object_1/dataset_2/points/elem_type 3*scalar
real data_object_1/dataset_2/points/data 3*{48}
integer data_object_1/dataset_2/point_data/number_of_arrays 3*scalar
integer data_object_1/dataset_2/point_data/array_0/name_len 3*scalar
byte data_object_1/dataset_2/point_data/array_0/name 3*{11}
long long data_object_1/dataset_2/point_data/array_0/number_of_elements 3*scalar
integer data_object_1/dataset_2/point_data/array_0/number_of_components 3*scalar
integer data_object_1/dataset_2/point_data/array_0/element_type 3*scalar
byte data_object_1/dataset_2/point_data/array_0/data 3*{16}
integer data_object_1/dataset_2/cell_data/number_of_arrays 3*scalar
integer data_object_1/dataset_2/cell_data/array_0/name_len 3*scalar
byte data_object_1/dataset_2/cell_data/array_0/name 3*{11}
long long data_object_1/dataset_2/cell_data/array_0/number_of_elements 3*scalar
integer data_object_1/dataset_2/cell_data/array_0/number_of_components 3*scalar
integer data_object_1/dataset_2/cell_data/array_0/element_type 3*scalar
real data_object_1/dataset_2/cell_data/array_0/data 3*{16}
......@@ -120,6 +120,7 @@ int main(int argc, char **argv)
// close the ADIOS stream
// we must force these to be destroyed before mpi finalize
// some of the adaptors make MPI calls in the destructor
......@@ -2,8 +2,3 @@ if(ENABLE_ADIOS)
add_executable(ADIOSAnalysisEndPoint ADIOSAnalysisEndPoint.cxx)
target_link_libraries(ADIOSAnalysisEndPoint PRIVATE opts mpi adios sensei timer)
add_executable(PosthocIOEndPoint PosthocIOEndPoint.cxx)
target_link_libraries(PosthocIOEndPoint PRIVATE opts mpi adios sensei timer)
......@@ -297,9 +297,146 @@ def check_arg(dic, arg, dfl=None, req=True):
return True
return True
class data_adaptor:
def __init__(self):
# data from sim
self.arrays = {}
self.points = None
self.cells = None
# connect all the callbacks
self.pda = sensei.ProgrammableDataAdaptor.New()
def __getattr__(self, *args):
# forward calls to pda
return self.pda.__getattribute__(*args)
def base(self):
return self.pda
def update(self, i,t,ids,x,y,z,m,vx,vy,vz,fx,fy,fz):
# update the state arrays
self.set_array_1(ids, 'ids')
self.set_array_1(m, 'm')
self.set_array_3(vx,vy,vz, 'v')
self.set_array_3(fx,fy,fz, 'f')
def set_array_1(self, vals, name):
arr = vtknp.numpy_to_vtk(vals, 1)
self.arrays[name] = arr
def set_array_3(self, vx,vy,vz, name):
# vector
nx = len(x)
vxyz = np.zeros(3*nx, dtype=vx.dtype)
vxyz[::3] = vx
vxyz[1::3] = vy
vxyz[2::3] = vz
vtkv = vtknp.numpy_to_vtk(vxyz, deep=1)
self.arrays[name] = vtkv
# mag
mname = 'mag%s'%(name)
mv = np.sqrt(vx**2 + vy**2 + vz**2)
vtkmv = vtknp.numpy_to_vtk(mv, deep=1)
self.arrays[mname] = vtkmv
def set_geometry(self, x,y,z):
# points
nx = len(x)
xyz = np.zeros(3*nx, dtype=x.dtype)
xyz[::3] = x[:]
xyz[1::3] = y[:]
xyz[2::3] = z[:]
vxyz = vtknp.numpy_to_vtk(xyz, deep=1)
pts = vtk.vtkPoints()
self.points = pts
# cells
cids = np.empty(2*nx, dtype=np.int32)
cids[::2] = 1
cids[1::2] = np.arange(0,nx,dtype=np.int32)
cells = vtk.vtkCellArray()
cells.SetCells(nx, vtknp.numpy_to_vtk(cids, \
deep=1, array_type=vtk.VTK_ID_TYPE))
self.cells = cells
def validate_mesh_name(self, mesh_name):
if mesh_name != "bodies":
raise RuntimeError('no mesh named "%s"'%(mesh_name))
def get_number_of_meshes(self):
def callback():
return 1
return callback
def get_mesh_name(self):
def callback(idx):
if idx != 0: raise RuntimeError('no mesh %d'%(idx))
return 'bodies'
return callback
def get_number_of_arrays(self):
def callback(mesh_name, assoc):
return len(self.arrays.keys()) \
if assoc == vtk.vtkDataObject.POINT else 0
return callback
def get_array_name(self):
def callback(mesh_name, assoc, idx):
return self.arrays.keys()[idx] \
if assoc == vtk.vtkDataObject.POINT else 0
return callback
def get_mesh(self):
def callback(mesh_name, structure_only):
# local bodies
pd = vtk.vtkPolyData()
if not structure_only:
# global dataset
mb = vtk.vtkMultiBlockDataSet()
mb.SetBlock(rank, pd)
return mb
return callback
def add_array(self):
def callback(mesh, mesh_name, assoc, array_name):
if assoc != vtk.vtkDataObject.POINT:
raise RuntimeError('no array named "%s" in cell data'%(array_name))
pd = mesh.GetBlock(rank)
return callback
def release_data(self):
def callback():
self.arrays = {}
self.points = None
self.cells = None
return callback
class analysis_adaptor:
def __init__(self):
self.DataAdaptor = sensei.VTKDataAdaptor.New()
self.DataAdaptor = data_adaptor()
self.AnalysisAdaptor = None
def initialize(self, analysis, args=''):
......@@ -317,13 +454,14 @@ class analysis_adaptor:
self.AnalysisAdaptor = sensei.CatalystAnalysisAdaptor.New()
elif analysis == 'posthoc':
if check_arg(args,'file','newton') and check_arg(args,'dir','./') \
and check_arg(args,'mode','0') and check_arg(args,'freq','1'):
self.AnalysisAdaptor = sensei.VTKPosthocIO.New()
self.AnalysisAdaptor.Initialize(comm, args['dir'],args['file'],\
[], ['ids','fx','fy','fz','f','vx','vy','vz','v','m'], \
int(args['mode']), int(args['freq']))
#elif analysis == 'posthoc':
# if check_arg(args,'file','newton') and check_arg(args,'dir','./') \
# and check_arg(args,'mode','0') and check_arg(args,'freq','1'):
# # TODO -- mesh name API updates
# self.AnalysisAdaptor = sensei.VTKPosthocIO.New()
# self.AnalysisAdaptor.Initialize(comm, args['dir'],args['file'],\
# [], ['ids','fx','fy','fz','f','vx','vy','vz','v','m'], \
# int(args['mode']), int(args['freq']))
# Libisim, ADIOS, etc
elif analysis == 'configurable':
if check_arg(args,'config'):
......@@ -335,31 +473,31 @@ class analysis_adaptor:
def finalize(self):
if self.Analysis == 'posthoc':
def update(self, i,t,ids,x,y,z,m,vx,vy,vz,fx,fy,fz):
status('% 5d\n'%(i)) if i > 0 and i % 70 == 0 else None
node = points_to_polydata(ids,x,y,z,m,vx,vy,vz,fx,fy,fz)
mb = vtk.vtkMultiBlockDataSet()
mb.SetBlock(rank, node)
#node = points_to_polydata(ids,x,y,z,m,vx,vy,vz,fx,fy,fz)
#mb = vtk.vtkMultiBlockDataSet()
#mb.SetBlock(rank, node)
def status(msg):
sys.stderr.write(msg if rank == 0 else '')
if __name__ == '__main__':
# parse the command line
parser = argparse.ArgumentParser()
......@@ -6,6 +6,8 @@ from paraview import lookuptable
paraview.simple.lookuptable = lookuptable
paraview.simple._lutReader = lookuptable.vtkPVLUTReader()
import sys
# Code generated from to create the CoProcessor.
# ParaView 5.3.0-78-gd6e7170 64 bits
......@@ -54,7 +56,7 @@ def CreateCoProcessor():
# create a new 'PVD Reader'
# create a producer from a simulation input