Commit f37e6425 authored by Utkarsh Ayachit

Remove `MultiBlock::GetGlobalRange` API.

Removing the `MultiBlock::GetGlobalRange` API to keep things consistent with
the `DataSet` API. Instead, one should use `FieldRangeCompute` or
`FieldRangeGlobalCompute`, as appropriate.
parent 20a052f4
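For reference, a minimal migration sketch of the replacement calls. The helper name, the `multiblock` argument, and the field name "pointvar" are placeholders borrowed from the unit test below; the `FieldRangeGlobalCompute.h` header name is an assumption, since only `FieldRangeCompute.h` appears in this diff.

```cpp
#include <vtkm/cont/ArrayHandle.h>
#include <vtkm/cont/FieldRangeCompute.h>
#include <vtkm/cont/FieldRangeGlobalCompute.h> // header name assumed for the global variant
#include <vtkm/cont/MultiBlock.h>

// Hypothetical helper sketching the migration; the caller supplies a populated
// MultiBlock, and "pointvar" stands in for any field name.
vtkm::Range PointvarRange(const vtkm::cont::MultiBlock& multiblock)
{
  // Per-component ranges computed over the blocks local to this rank.
  vtkm::cont::ArrayHandle<vtkm::Range> localRange =
    vtkm::cont::FieldRangeCompute(multiblock, "pointvar");
  (void)localRange;

  // Per-component ranges reduced across all MPI ranks; in MPI builds this is
  // a collective call, so every rank should invoke it.
  vtkm::cont::ArrayHandle<vtkm::Range> globalRange =
    vtkm::cont::FieldRangeGlobalCompute(multiblock, "pointvar");

  // One vtkm::Range per field component; component 0 shown here.
  return globalRange.GetPortalConstControl().Get(0);
}
```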
@@ -321,117 +321,6 @@ VTKM_CONT vtkm::Bounds MultiBlock::GetBlockBounds(const std::size_t& block_index
return coords.GetBounds();
}
VTKM_CONT vtkm::cont::ArrayHandle<vtkm::Range> MultiBlock::GetGlobalRange(const int& index) const
{
assert(this->Blocks.size() > 0);
vtkm::cont::Field field = this->Blocks.at(0).GetField(index);
std::string field_name = field.GetName();
return this->GetGlobalRange(field_name);
}
VTKM_CONT vtkm::cont::ArrayHandle<vtkm::Range> MultiBlock::GetGlobalRange(
const std::string& field_name) const
{
using BlockMetaData = std::vector<vtkm::Range>;
auto comm = vtkm::cont::EnvironmentTracker::GetCommunicator();
diy::Master master(comm,
1,
-1,
[]() -> void* { return new BlockMetaData(); },
[](void* ptr) { delete static_cast<BlockMetaData*>(ptr); });
// create assigner that assigns blocks to ranks.
vtkm::cont::AssignerMultiBlock assigner(*this);
const int nblocks = assigner.nblocks(); // this is the total number of blocks across all ranks.
if (nblocks == 0)
{
// short circuit if there are no blocks in this multiblock globally.
return vtkm::cont::ArrayHandle<vtkm::Range>();
}
// populate master.
diy::RegularDecomposer<diy::DiscreteBounds> decomposer(1, diy::interval(0, nblocks - 1), nblocks);
decomposer.decompose(comm.rank(), assigner, master);
auto self = (*this);
master.foreach ([&](BlockMetaData* data, const diy::Master::ProxyWithLink& cp) {
const vtkm::cont::DataSet& block = vtkm::cont::detail::GetBlock(self, cp);
if (block.HasField(field_name))
{
auto field = block.GetField(field_name);
const vtkm::cont::ArrayHandle<vtkm::Range> range = field.GetRange();
*data = vtkm::cont::detail::CopyArrayPortalToVector(range.GetPortalConstControl());
}
});
// let's reduce the range to block(gid=0).
diy::RegularMergePartners partners(decomposer, /*k=*/2);
auto callback =
[](BlockMetaData* data, const diy::ReduceProxy& srp, const diy::RegularMergePartners&) {
const auto selfid = srp.gid();
// 1. dequeue
BlockMetaData message;
std::vector<int> incoming;
srp.incoming(incoming);
for (const int gid : incoming)
{
if (gid != selfid)
{
srp.dequeue(gid, message);
data->resize(std::max(data->size(), message.size()));
for (size_t cc = 0; cc < data->size(); ++cc)
{
(*data)[cc].Include(message[cc]);
}
}
}
// 2. enqueue
for (int cc = 0; cc < srp.out_link().size(); ++cc)
{
auto target = srp.out_link().target(cc);
if (target.gid != selfid)
{
srp.enqueue(target, *data);
}
}
};
// reduce and produce aggregated range on block(gid=0).
diy::reduce(master, assigner, partners, callback);
// now broadcast out the range to all ranks.
BlockMetaData reduced_range;
if (master.local(0))
{
reduced_range = *(master.block<BlockMetaData>(master.lid(0)));
}
master.clear();
diy::ContiguousAssigner bAssigner(comm.size(), comm.size());
diy::RegularDecomposer<diy::DiscreteBounds> bDecomposer(
1, diy::interval(0, comm.size() - 1), comm.size());
bDecomposer.decompose(comm.rank(), bAssigner, master);
*master.block<BlockMetaData>(0) = reduced_range;
diy::RegularBroadcastPartners bPartners(bDecomposer, /*k=*/2);
// we can use the same `callback` as earlier since all blocks are
// initialized to empty, hence `vtkm::Range::Include()` should simply work
// as a copy.
diy::reduce(master, bAssigner, bPartners, callback);
assert(master.size() == 1);
reduced_range = *master.block<BlockMetaData>(0);
vtkm::cont::ArrayHandle<vtkm::Range> tmprange = vtkm::cont::make_ArrayHandle(reduced_range);
vtkm::cont::ArrayHandle<vtkm::Range> range;
vtkm::cont::ArrayCopy(vtkm::cont::make_ArrayHandle(reduced_range), range);
return range;
}
VTKM_CONT
void MultiBlock::PrintSummary(std::ostream& stream) const
@@ -97,17 +97,6 @@ public:
vtkm::Bounds GetBlockBounds(const std::size_t& block_index,
vtkm::Id coordinate_system_index = 0) const;
//@{
/// Get the unified range of the same field within all the contained DataSets.
/// These methods are not thread-safe and may involve global communication
/// across all ranks in distributed environments with MPI.
VTKM_CONT
vtkm::cont::ArrayHandle<vtkm::Range> GetGlobalRange(const std::string& field_name) const;
VTKM_CONT
vtkm::cont::ArrayHandle<vtkm::Range> GetGlobalRange(const int& index) const;
//@}
VTKM_CONT
void PrintSummary(std::ostream& stream) const;
@@ -70,7 +70,7 @@ set(unit_tests
UnitTestDynamicArrayHandle.cxx
UnitTestDynamicCellSet.cxx
UnitTestFieldRangeCompute.cxx
UnitTestMultiBlock.cxx,MPI
UnitTestMultiBlock.cxx
UnitTestRuntimeDeviceInformation.cxx
UnitTestStorageBasic.cxx
UnitTestStorageImplicit.cxx
@@ -27,8 +27,8 @@
#include <vtkm/cont/DataSet.h>
#include <vtkm/cont/DataSetFieldAdd.h>
#include <vtkm/cont/DynamicArrayHandle.h>
#include <vtkm/cont/EnvironmentTracker.h>
#include <vtkm/cont/Field.h>
#include <vtkm/cont/FieldRangeCompute.h>
#include <vtkm/cont/MultiBlock.h>
#include <vtkm/cont/serial/DeviceAdapterSerial.h>
#include <vtkm/cont/testing/MakeTestDataSet.h>
@@ -54,11 +54,7 @@ static void MultiBlockTest()
multiblock.AddBlock(TDset1);
multiblock.AddBlock(TDset2);
const int procsize = vtkm::cont::EnvironmentTracker::GetCommunicator().size();
VTKM_TEST_ASSERT(multiblock.GetNumberOfBlocks() == 2, "Incorrect number of blocks");
VTKM_TEST_ASSERT(multiblock.GetGlobalNumberOfBlocks() == 2 * procsize,
"Incorrect number of blocks");
vtkm::cont::DataSet TestDSet = multiblock.GetBlock(0);
VTKM_TEST_ASSERT(TDset1.GetNumberOfFields() == TestDSet.GetNumberOfFields(),
@@ -99,28 +95,14 @@ static void MultiBlockTest()
Field2GlobeRange.Include(Set1Field2Range);
Field2GlobeRange.Include(Set2Field2Range);
VTKM_TEST_ASSERT(multiblock.GetGlobalRange("pointvar").GetPortalConstControl().Get(0) ==
using vtkm::cont::FieldRangeCompute;
VTKM_TEST_ASSERT(FieldRangeCompute(multiblock, "pointvar").GetPortalConstControl().Get(0) ==
Field1GlobeRange,
"Local field value range info incorrect");
VTKM_TEST_ASSERT(multiblock.GetGlobalRange("cellvar").GetPortalConstControl().Get(0) ==
VTKM_TEST_ASSERT(FieldRangeCompute(multiblock, "cellvar").GetPortalConstControl().Get(0) ==
Field2GlobeRange,
"Local field value range info incorrect");
TDset1.GetField(0).GetRange(&Set1Field1Range);
TDset1.GetField(1).GetRange(&Set1Field2Range);
TDset2.GetField(0).GetRange(&Set2Field1Range);
TDset2.GetField(1).GetRange(&Set2Field2Range);
Field1GlobeRange.Include(Set1Field1Range);
Field1GlobeRange.Include(Set2Field1Range);
Field2GlobeRange.Include(Set1Field2Range);
Field2GlobeRange.Include(Set2Field2Range);
VTKM_TEST_ASSERT(multiblock.GetGlobalRange(0).GetPortalControl().Get(0) == Field1GlobeRange,
"Local field value range info incorrect");
VTKM_TEST_ASSERT(multiblock.GetGlobalRange(1).GetPortalControl().Get(0) == Field2GlobeRange,
"Local field value range info incorrect");
vtkm::Range SourceRange; //test the validity of member function GetField(FieldName, BlockId)
multiblock.GetField("cellvar", 0).GetRange(&SourceRange);
vtkm::Range TestRange;
@@ -166,9 +148,7 @@ void DataSet_Compare(vtkm::cont::DataSet& LeftDateSet, vtkm::cont::DataSet& Righ
return;
}
int UnitTestMultiBlock(int argc, char* argv[])
int UnitTestMultiBlock(int, char* [])
{
diy::mpi::environment env(argc, argv);
vtkm::cont::EnvironmentTracker::SetCommunicator(diy::mpi::communicator(MPI_COMM_WORLD));
return vtkm::cont::testing::Testing::Run(MultiBlockTest);
}