DIY  3.0
data-parallel out-of-core C++ library
 All Classes Namespaces Functions Typedefs Groups Pages
io.hpp
1 #ifndef DIY_MPI_IO_HPP
2 #define DIY_MPI_IO_HPP
3 
4 #include "../constants.h"
5 
6 #include <vector>
7 #include <string>
8 
namespace diy
{
namespace mpi
{
namespace io
{
  // Absolute byte offset within a file (MPI's file-offset integer type).
  typedef MPI_Offset            offset;

  //! \ingroup MPI
  //! \brief RAII wrapper around an MPI file handle (MPI_File).
  //!
  //! NOTE(review): only a reference to the communicator is stored;
  //! the communicator must outlive the file object.
  class file
  {
    public:
      // Open-mode flags (values from MPI_MODE_*); combine with bitwise-or
      // and pass as the constructor's mode argument.
      enum
      {
        rdonly          = MPI_MODE_RDONLY,          // read-only access
        rdwr            = MPI_MODE_RDWR,            // read and write access
        wronly          = MPI_MODE_WRONLY,          // write-only access
        create          = MPI_MODE_CREATE,          // create the file if it does not exist
        exclusive       = MPI_MODE_EXCL,            // error if creating a file that exists
        delete_on_close = MPI_MODE_DELETE_ON_CLOSE, // delete the file when it is closed
        unique_open     = MPI_MODE_UNIQUE_OPEN,     // file will not be opened concurrently elsewhere
        sequential      = MPI_MODE_SEQUENTIAL,      // file will only be accessed sequentially
        append          = MPI_MODE_APPEND           // set initial position of all file pointers to end
      };

    public:
      //! opens filename on comm with the given mode (bitwise-or of the flags above)
      inline          file(const communicator& comm, const std::string& filename, int mode);
                      ~file()                       { close(); }  // closes the handle on destruction
      inline void     close();

      inline offset   size() const;                 // current file size in bytes
      inline void     resize(offset size);          // set the file size to size bytes

      // Byte-level positioned I/O at absolute offset o.
      // The *_all variants are the collective versions.
      inline void     read_at(offset o, char* buffer, size_t size);
      inline void     read_at_all(offset o, char* buffer, size_t size);
      inline void     write_at(offset o, const char* buffer, size_t size);
      inline void     write_at_all(offset o, const char* buffer, size_t size);

      // Typed convenience overloads: transfer data.size() elements of T;
      // for reads, the vector must be pre-sized by the caller.
      template<class T>
      inline void     read_at(offset o, std::vector<T>& data);

      template<class T>
      inline void     read_at_all(offset o, std::vector<T>& data);

      template<class T>
      inline void     write_at(offset o, const std::vector<T>& data);

      template<class T>
      inline void     write_at_all(offset o, const std::vector<T>& data);

      const communicator&
                      comm() const                  { return comm_; }

      MPI_File&       handle()                      { return fh; }  // raw MPI handle access

    private:
      const communicator&   comm_;    // not owned; must outlive this object
      MPI_File              fh;       // underlying MPI file handle
  };
}
}
}
71 
//! Opens filename on communicator comm__ with the given mode flags
//! (a bitwise-or of the file:: mode enum values).
diy::mpi::io::file::
file(const communicator& comm__, const std::string& filename, int mode)
: comm_(comm__)
{
#ifndef DIY_NO_MPI
  // const_cast: MPI-2's MPI_File_open takes a non-const char* filename.
  // NOTE(review): the return code of MPI_File_open is ignored; on failure,
  // fh may be left unset -- presumably errors surface on first use. TODO confirm.
  MPI_File_open(comm__, const_cast<char*>(filename.c_str()), mode, MPI_INFO_NULL, &fh);
#else
  DIY_UNUSED(comm__);
  DIY_UNUSED(filename);
  DIY_UNUSED(mode);
  DIY_UNSUPPORTED_MPI_CALL(MPI_File_open);
#endif
}
85 
//! Closes the file if it is open. Safe to call repeatedly: MPI_File_close
//! resets fh to MPI_FILE_NULL, so subsequent calls become no-ops.
void
diy::mpi::io::file::
close()
{
#ifndef DIY_NO_MPI
  if (fh != MPI_FILE_NULL)
    MPI_File_close(&fh);
#endif
}
95 
96 diy::mpi::io::offset
97 diy::mpi::io::file::
98 size() const
99 {
100 #ifndef DIY_NO_MPI
101  offset sz;
102  MPI_File_get_size(fh, &sz);
103  return sz;
104 #else
105  DIY_UNSUPPORTED_MPI_CALL(MPI_File_get_size);
106 #endif
107 }
108 
//! Sets the file's size to size_ bytes (truncating or extending as needed).
void
diy::mpi::io::file::
resize(diy::mpi::io::offset size_)
{
#ifndef DIY_NO_MPI
  MPI_File_set_size(fh, size_);
#else
  DIY_UNUSED(size_);
  DIY_UNSUPPORTED_MPI_CALL(MPI_File_set_size);
#endif
}
120 
//! Reads size_ bytes into buffer starting at absolute file offset o
//! (independent, per-process read).
void
diy::mpi::io::file::
read_at(offset o, char* buffer, size_t size_)
{
#ifndef DIY_NO_MPI
  status s;     // completion status is currently ignored
  // NOTE(review): size_ is narrowed to int (the count type MPI expects);
  // transfers larger than INT_MAX bytes would be truncated -- TODO confirm callers.
  MPI_File_read_at(fh, o, buffer, static_cast<int>(size_), detail::get_mpi_datatype<char>(), &s.s);
#else
  DIY_UNUSED(o);
  DIY_UNUSED(buffer);
  DIY_UNUSED(size_);
  DIY_UNSUPPORTED_MPI_CALL(MPI_File_read_at);
#endif
}
135 
136 template<class T>
137 void
138 diy::mpi::io::file::
139 read_at(offset o, std::vector<T>& data)
140 {
141  read_at(o, &data[0], data.size()*sizeof(T));
142 }
143 
//! Collective read of size_ bytes into buffer at absolute offset o;
//! must be called by all ranks of the communicator.
void
diy::mpi::io::file::
read_at_all(offset o, char* buffer, size_t size_)
{
#ifndef DIY_NO_MPI
  status s;     // completion status is currently ignored
  // NOTE(review): size_ is narrowed to int; transfers larger than INT_MAX
  // bytes would be truncated -- TODO confirm callers.
  MPI_File_read_at_all(fh, o, buffer, static_cast<int>(size_), detail::get_mpi_datatype<char>(), &s.s);
#else
  DIY_UNUSED(o);
  DIY_UNUSED(buffer);
  DIY_UNUSED(size_);
  DIY_UNSUPPORTED_MPI_CALL(MPI_File_read_at_all);
#endif
}
158 
159 template<class T>
160 void
161 diy::mpi::io::file::
162 read_at_all(offset o, std::vector<T>& data)
163 {
164  read_at_all(o, (char*) &data[0], data.size()*sizeof(T));
165 }
166 
167 void
168 diy::mpi::io::file::
169 write_at(offset o, const char* buffer, size_t size_)
170 {
171 #ifndef DIY_NO_MPI
172  status s;
173  MPI_File_write_at(fh, o, (void *)buffer, static_cast<int>(size_), detail::get_mpi_datatype<char>(), &s.s);
174 #else
175  DIY_UNUSED(o);
176  DIY_UNUSED(buffer);
177  DIY_UNUSED(size_);
178  DIY_UNSUPPORTED_MPI_CALL(MPI_File_write_at);
179 #endif
180 }
181 
182 template<class T>
183 void
184 diy::mpi::io::file::
185 write_at(offset o, const std::vector<T>& data)
186 {
187  write_at(o, (const char*) &data[0], data.size()*sizeof(T));
188 }
189 
190 void
191 diy::mpi::io::file::
192 write_at_all(offset o, const char* buffer, size_t size_)
193 {
194 #ifndef DIY_NO_MPI
195  status s;
196  MPI_File_write_at_all(fh, o, (void *)buffer, static_cast<int>(size_), detail::get_mpi_datatype<char>(), &s.s);
197 #else
198  DIY_UNUSED(o);
199  DIY_UNUSED(buffer);
200  DIY_UNUSED(size_);
201  DIY_UNSUPPORTED_MPI_CALL(MPI_File_write_at_all);
202 #endif
203 }
204 
205 template<class T>
206 void
207 diy::mpi::io::file::
208 write_at_all(offset o, const std::vector<T>& data)
209 {
210  write_at_all(o, &data[0], data.size()*sizeof(T));
211 }
212 
213 #endif
Wraps MPI file IO.
Definition: io.hpp:18
Simple wrapper around MPI_Comm.
Definition: communicator.hpp:8