/*
 * This file is part of the GROMACS molecular simulation package.
 *
 * Copyright (c) 2005,2006,2007,2008,2009,2010,2012,2013,2014,2015,2016,2017,2018, by the GROMACS development team, led by
 * Mark Abraham, David van der Spoel, Berk Hess, and Erik Lindahl,
 * and including many others, as listed in the AUTHORS file in the
 * top-level source directory and at http://www.gromacs.org.
 *
 * GROMACS is free software; you can redistribute it and/or
 * modify it under the terms of the GNU Lesser General Public License
 * as published by the Free Software Foundation; either version 2.1
 * of the License, or (at your option) any later version.
 *
 * GROMACS is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
 * Lesser General Public License for more details.
 *
 * You should have received a copy of the GNU Lesser General Public
 * License along with GROMACS; if not, see
 * http://www.gnu.org/licenses, or write to the Free Software Foundation,
 * Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
 *
 * If you want to redistribute modifications to GROMACS, please
 * consider that scientific software is very special. Version
 * control is crucial - bugs must be traceable. We will be happy to
 * consider code for inclusion in the official distribution, but
 * derived work must not be called official GROMACS. Details are found
 * in the README & COPYING files - if they are missing, get the
 * official version at http://www.gromacs.org.
 *
 * To help us fund GROMACS development, we humbly ask that you cite
 * the research papers on the package. Check out http://www.gromacs.org.
 */

/*! \defgroup module_domdec Spatial domain decomposition (for parallelization over MPI)
 * \ingroup group_mdrun
 *
 * \brief Manages the decomposition of the simulation volume over MPI
 * ranks to try to distribute work evenly with minimal communication
 * overheads.
 *
 * \todo Get domdec stuff out of mdtypes/commrec.h
 *
 * \author Berk Hess <hess@kth.se>
 */

/*! \libinternal \file
 *
 * \brief This file declares functions for mdrun to call to manage the
 * details of its domain decomposition.
 *
 * \author Berk Hess <hess@kth.se>
 * \inlibraryapi
 * \ingroup module_domdec
 */

#ifndef GMX_DOMDEC_DOMDEC_H
#define GMX_DOMDEC_DOMDEC_H

#include <vector>

#include "gromacs/math/paddedvector.h"
#include "gromacs/math/vectypes.h"
#include "gromacs/utility/arrayref.h"
#include "gromacs/utility/basedefinitions.h"
#include "gromacs/utility/real.h"

struct cginfo_mb_t;
struct gmx_domdec_t;
struct gmx_ddbox_t;
struct gmx_domdec_zones_t;
struct gmx_localtop_t;
struct gmx_mtop_t;
struct gmx_vsite_t;
struct MdrunOptions;
struct t_block;
struct t_blocka;
struct t_commrec;
struct t_forcerec;
struct t_inputrec;
struct t_mdatoms;
struct t_nrnb;
struct gmx_wallcycle;
class t_state;

namespace gmx
{
class Constraints;
class MDAtoms;
class LocalAtomSetManager;
} // namespace

/*! \brief Returns the global topology atom number belonging to local atom index i.
 *
 * This function is intended for writing ASCII output
 * and returns atom numbers starting at 1.
 * When dd=NULL returns i+1.
 */
int ddglatnr(const gmx_domdec_t *dd, int i);
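
/* Illustrative usage sketch (not from the GROMACS sources): print 1-based
 * global atom numbers for the home atoms; dd may be NULL, in which case the
 * local index is simply offset by one. fplog is assumed from the caller.
 *
 *   for (int i = 0; i < dd_numHomeAtoms(*dd); i++)
 *   {
 *       fprintf(fplog, "local %d -> global %d\n", i, ddglatnr(dd, i));
 *   }
 */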

/*! \brief Return a block struct for the charge groups of the whole system */
t_block *dd_charge_groups_global(struct gmx_domdec_t *dd);

/*! \brief Store the global cg indices of the home cgs in state.
 *
 * This means the state can be reset, even after a new DD partitioning.
 */
void dd_store_state(struct gmx_domdec_t *dd, t_state *state);

/*! \brief Returns a pointer to the gmx_domdec_zones_t struct */
struct gmx_domdec_zones_t *domdec_zones(struct gmx_domdec_t *dd);

/*! \brief Sets the j-charge-group range for i-charge-group \p icg */
void dd_get_ns_ranges(const gmx_domdec_t *dd, int icg,
                      int *jcg0, int *jcg1, ivec shift0, ivec shift1);

/*! \brief Returns the number of home atoms */
int dd_numHomeAtoms(const gmx_domdec_t &dd);

/*! \brief Returns the atom range in the local state for atoms that need to be present in mdatoms */
int dd_natoms_mdatoms(const gmx_domdec_t *dd);

/*! \brief Returns the atom range in the local state for atoms involved in virtual sites */
int dd_natoms_vsite(const gmx_domdec_t *dd);

/*! \brief Sets \p at_start and \p at_end to the atom range in the local state for atoms received in constraints communication */
void dd_get_constraint_range(const gmx_domdec_t *dd,
                             int *at_start, int *at_end);

/*! \libinternal \brief Struct for passing around the number of PME domains */
struct NumPmeDomains
{
    int x; //!< The number of PME domains along dimension x
    int y; //!< The number of PME domains along dimension y
};

/*! \brief Returns the number of PME domains, can be called with dd=NULL */
NumPmeDomains getNumPmeDomains(const gmx_domdec_t *dd);
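
/* Illustrative usage sketch (not from the GROMACS sources): query the PME
 * domain grid; safe to call without domain decomposition.
 *
 *   NumPmeDomains numPmeDomains = getNumPmeDomains(dd); // dd may be NULL
 *   int numPmeDomainsTotal      = numPmeDomains.x*numPmeDomains.y;
 */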

/*! \brief Returns the set of DD ranks that communicate with PME rank \p pmenodeid */
std::vector<int> get_pme_ddranks(const t_commrec *cr, int pmenodeid);

/*! \brief Returns the maximum shift for coordinate communication in PME, dim x */
int dd_pme_maxshift_x(const gmx_domdec_t *dd);

/*! \brief Returns the maximum shift for coordinate communication in PME, dim y */
int dd_pme_maxshift_y(const gmx_domdec_t *dd);

/*! \brief The options for the domain decomposition MPI task ordering. */
enum class DdRankOrder
{
    select,     //!< First value (needed to cope with command-line parsing)
    interleave, //!< Interleave the PP and PME ranks
    pp_pme,     //!< First all PP ranks, then all PME ranks at the end
    cartesian,  //!< Use Cartesian communicators for PP, PME and PP-PME
    nr          //!< The number of options
};

/*! \brief The options for the dynamic load balancing. */
enum class DlbOption
{
    select,           //!< First value (needed to cope with command-line parsing)
    turnOnWhenUseful, //!< Turn on DLB when we think it would improve performance
    no,               //!< Never turn on DLB
    yes,              //!< Turn on DLB from the start and keep it on
    nr                //!< The number of options
};

/*! \libinternal \brief Structure containing all (command line) options for the domain decomposition */
struct DomdecOptions
{
    /*! \brief Constructor */
    DomdecOptions();

    //! If true, check that all bonded interactions have been assigned to exactly one domain/rank.
    gmx_bool checkBondedInteractions;
    //! If true, don't communicate all atoms between the non-bonded cut-off and the larger bonded cut-off, but only those that have non-local bonded interactions. This significantly reduces the communication volume.
    gmx_bool useBondedCommunication;
    //! The domain decomposition grid cell count, 0 means let domdec choose based on the number of ranks.
    ivec numCells;
    //! The number of separate PME ranks requested, -1 = auto.
    int numPmeRanks;
    //! Ordering of the PP and PME ranks, values from enum above.
    DdRankOrder rankOrder;
    //! The minimum communication range, used for extending the communication range for bonded interactions (nm).
    real minimumCommunicationRange;
    //! Communication range for atoms involved in constraints (P-LINCS) (nm).
    real constraintCommunicationRange;
    //! Dynamic load balancing option, values from enum above.
    DlbOption dlbOption;
    /*! \brief Fraction in (0,1) by whose reciprocal the initial
     * DD cell size will be increased in order to provide a margin
     * in which dynamic load balancing can act, while preserving
     * the minimum cell size. */
    real dlbScaling;
    //! String containing a vector of the relative sizes in the x direction of the corresponding DD cells.
    const char *cellSizeX;
    //! String containing a vector of the relative sizes in the y direction of the corresponding DD cells.
    const char *cellSizeY;
    //! String containing a vector of the relative sizes in the z direction of the corresponding DD cells.
    const char *cellSizeZ;
};
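
/* Illustrative usage sketch (not from the GROMACS sources): request a 4x2x1
 * decomposition with automatic PME-rank selection; the values shown are
 * hypothetical.
 *
 *   DomdecOptions options;
 *   options.numCells[XX] = 4;
 *   options.numCells[YY] = 2;
 *   options.numCells[ZZ] = 1;
 *   options.numPmeRanks  = -1; // -1 = choose automatically
 *   options.rankOrder    = DdRankOrder::interleave;
 */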

/*! \brief Initializes the domain decomposition, chooses the DD grid and PME ranks, returns the DD struct */
gmx_domdec_t *
init_domain_decomposition(FILE                           *fplog,
                          t_commrec                      *cr,
                          const DomdecOptions            &options,
                          const MdrunOptions             &mdrunOptions,
                          const gmx_mtop_t               *mtop,
                          const t_inputrec               *ir,
                          const matrix                    box,
                          gmx::ArrayRef<const gmx::RVec>  xGlobal,
                          gmx::LocalAtomSetManager       *atomSets);
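
/* Illustrative usage sketch (not from the GROMACS sources): set up DD during
 * mdrun startup; all surrounding variables are assumed to exist in the
 * caller.
 *
 *   gmx_domdec_t *dd = init_domain_decomposition(fplog, cr, options,
 *                                                mdrunOptions, mtop, ir,
 *                                                box, xGlobal, &atomSets);
 */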

/*! \brief Initialize data structures for bonded interactions */
void dd_init_bondeds(FILE              *fplog,
                     gmx_domdec_t      *dd,
                     const gmx_mtop_t  *mtop,
                     const gmx_vsite_t *vsite,
                     const t_inputrec  *ir,
                     gmx_bool           bBCheck,
                     cginfo_mb_t       *cginfo_mb);

/*! \brief Returns whether we need to do pbc for calculating bonded interactions */
gmx_bool dd_bonded_molpbc(const gmx_domdec_t *dd, int ePBC);

/*! \brief Change the DD non-bonded communication cut-off.
 *
 * This could fail when trying to increase the cut-off;
 * in that case FALSE is returned and the cut-off is not modified.
 */
gmx_bool change_dd_cutoff(struct t_commrec *cr,
                          t_state *state, const t_inputrec *ir,
                          real cutoff_req);
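
/* Illustrative usage sketch (not from the GROMACS sources): try to increase
 * the cut-off and keep the old one when the current decomposition cannot
 * accommodate the request.
 *
 *   if (!change_dd_cutoff(cr, state, ir, requestedCutoff))
 *   {
 *       // requestedCutoff does not fit the current DD grid; the old
 *       // cut-off remains in effect, so fall back or report the failure.
 *   }
 */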

/*! \brief Limit DLB to preserve the option of returning to the current cut-off.
 *
 * Domain boundary changes due to the DD dynamic load balancing can limit
 * the cut-off distance that can be set in change_dd_cutoff. This function
 * sets/changes the DLB limit such that using the passed (pair-list) cut-off
 * should still be possible after subsequently setting a shorter cut-off
 * with change_dd_cutoff.
 */
void set_dd_dlb_max_cutoff(struct t_commrec *cr, real cutoff);

/*! \brief Return whether we are currently using dynamic load balancing */
gmx_bool dd_dlb_is_on(const struct gmx_domdec_t *dd);

/*! \brief Return whether the DLB lock is set */
gmx_bool dd_dlb_is_locked(const struct gmx_domdec_t *dd);

/*! \brief Set a lock such that with DLB=auto DLB cannot get turned on */
void dd_dlb_lock(struct gmx_domdec_t *dd);

/*! \brief Clear a lock such that with DLB=auto DLB may get turned on later */
void dd_dlb_unlock(struct gmx_domdec_t *dd);
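
/* Illustrative usage sketch (not from the GROMACS sources): keep domain
 * boundaries fixed during a phase that cannot tolerate DLB switching on.
 *
 *   dd_dlb_lock(dd);
 *   // ... work that requires stable domain boundaries ...
 *   dd_dlb_unlock(dd); // with DLB=auto, DLB may get turned on again later
 */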

/*! \brief Set up communication for averaging GPU wait times over domains
 *
 * When domains (PP MPI ranks) share a GPU, the individual GPU wait times
 * are meaningless, as they depend on the order in which tasks on the same
 * GPU finish. Therefore the wait times need to be averaged over the ranks
 * sharing the same GPU. This function sets up the communication for that.
 */
void dd_setup_dlb_resource_sharing(t_commrec *cr,
                                   int        gpu_id);

/*! \brief Cycle counter indices used internally in the domain decomposition */
enum {
    ddCyclStep, ddCyclPPduringPME, ddCyclF, ddCyclWaitGPU, ddCyclPME, ddCyclNr
};

/*! \brief Add the wallcycle count to the DD counter */
void dd_cycles_add(const gmx_domdec_t *dd, float cycles, int ddCycl);

/*! \brief Start the force flop count */
void dd_force_flop_start(struct gmx_domdec_t *dd, t_nrnb *nrnb);

/*! \brief Stop the force flop count */
void dd_force_flop_stop(struct gmx_domdec_t *dd, t_nrnb *nrnb);

/*! \brief Return the PME/PP force load ratio, or -1 if nothing was measured.
 *
 * Should only be called on the DD master node.
 */
float dd_pme_f_ratio(struct gmx_domdec_t *dd);

/*! \brief Communicate the coordinates to the neighboring cells and do pbc. */
void dd_move_x(struct gmx_domdec_t      *dd,
               matrix                    box,
               gmx::ArrayRef<gmx::RVec>  x,
               gmx_wallcycle            *wcycle);

/*! \brief Sum the forces over the neighboring cells.
 *
 * When fshift!=NULL the shift forces are updated to obtain
 * the correct virial from the single sum including f.
 */
void dd_move_f(struct gmx_domdec_t      *dd,
               gmx::ArrayRef<gmx::RVec>  f,
               rvec                     *fshift,
               gmx_wallcycle            *wcycle);
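
/* Illustrative usage sketch (not from the GROMACS sources): the halo-exchange
 * pattern within one MD step; box, x, f, fshift and wcycle come from the
 * caller's MD-loop context.
 *
 *   dd_move_x(dd, box, x, wcycle);    // gather non-local coordinates
 *   // ... compute forces on home + communicated atoms, filling f ...
 *   dd_move_f(dd, f, fshift, wcycle); // reduce forces back to the home ranks
 */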

/*! \brief Communicate a real for each atom to the neighboring cells. */
void dd_atom_spread_real(struct gmx_domdec_t *dd, real v[]);

/*! \brief Sum the contributions to a real for each atom over the neighboring cells. */
void dd_atom_sum_real(struct gmx_domdec_t *dd, real v[]);

/*! \brief Partition the system over the nodes.
 *
 * step is only used for printing error messages.
 * If bMasterState==TRUE then state_global from the master node is used,
 * else state_local is redistributed between the nodes.
 * When f!=NULL, *f will be reallocated to the size of state_local.
 */
void dd_partition_system(FILE              *fplog,
                         gmx_int64_t        step,
                         const t_commrec   *cr,
                         gmx_bool           bMasterState,
                         int                nstglobalcomm,
                         t_state           *state_global,
                         const gmx_mtop_t  *top_global,
                         const t_inputrec  *ir,
                         t_state           *state_local,
                         PaddedRVecVector  *f,
                         gmx::MDAtoms      *mdatoms,
                         gmx_localtop_t    *top_local,
                         t_forcerec        *fr,
                         gmx_vsite_t       *vsite,
                         gmx::Constraints  *constr,
                         t_nrnb            *nrnb,
                         gmx_wallcycle     *wcycle,
                         gmx_bool           bVerbose);
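
/* Illustrative usage sketch (not from the GROMACS sources): the first
 * partitioning distributes the master-rank state, later repartitionings
 * redistribute the local states; all arguments come from the caller.
 *
 *   // First step: distribute state_global from the master rank
 *   dd_partition_system(fplog, step, cr, TRUE, nstglobalcomm, state_global,
 *                       top_global, ir, state_local, &f, mdatoms, top_local,
 *                       fr, vsite, constr, nrnb, wcycle, bVerbose);
 *
 *   // During the run: redistribute the existing local states
 *   dd_partition_system(fplog, step, cr, FALSE, nstglobalcomm, state_global,
 *                       top_global, ir, state_local, &f, mdatoms, top_local,
 *                       fr, vsite, constr, nrnb, wcycle, bVerbose);
 */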

/*! \brief Reset all the statistics and counters for total run counting */
void reset_dd_statistics_counters(struct gmx_domdec_t *dd);

/*! \brief Print statistics for domain decomposition communication */
void print_dd_statistics(const t_commrec *cr, const t_inputrec *ir, FILE *fplog);

/*! \brief Check whether bonded interactions are missing, if appropriate
 *
 * \param[in]     fplog                                  Log file pointer
 * \param[in]     cr                                     Communication object
 * \param[in]     totalNumberOfBondedInteractions        Result of the global reduction over the number of bonds treated in each domain
 * \param[in]     top_global                             Global topology for the error message
 * \param[in]     top_local                              Local topology for the error message
 * \param[in]     state                                  Global state for the error message
 * \param[in,out] shouldCheckNumberOfBondedInteractions  Whether we should do the check; always set to false on return.
 */
void checkNumberOfBondedInteractions(FILE                 *fplog,
                                     t_commrec            *cr,
                                     int                   totalNumberOfBondedInteractions,
                                     const gmx_mtop_t     *top_global,
                                     const gmx_localtop_t *top_local,
                                     const t_state        *state,
                                     bool                 *shouldCheckNumberOfBondedInteractions);
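
/* Illustrative usage sketch (not from the GROMACS sources): the in/out flag
 * makes this a check-once pattern; the call itself clears the flag.
 *
 *   if (shouldCheckNumberOfBondedInteractions)
 *   {
 *       checkNumberOfBondedInteractions(fplog, cr,
 *                                       totalNumberOfBondedInteractions,
 *                                       top_global, top_local, state,
 *                                       &shouldCheckNumberOfBondedInteractions);
 *   }
 */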

/* In domdec_con.c */

/*! \brief Communicates the virtual site forces, reduces the shift forces when \p fshift != NULL */
void dd_move_f_vsites(struct gmx_domdec_t *dd, rvec *f, rvec *fshift);

/*! \brief Clears the forces for virtual sites */
void dd_clear_f_vsites(struct gmx_domdec_t *dd, rvec *f);

/*! \brief Move x0 and also x1 if x1!=NULL. bX1IsCoord tells whether to do PBC on x1 */
void dd_move_x_constraints(struct gmx_domdec_t *dd, matrix box,
                           rvec *x0, rvec *x1, gmx_bool bX1IsCoord);

/*! \brief Communicates the coordinates involved in virtual sites */
void dd_move_x_vsites(struct gmx_domdec_t *dd, const matrix box, rvec *x);

/*! \brief Returns the local atom count array for all constraints
 *
 * The local atom count for a constraint, possible values 2/1/0, is needed
 * to avoid under- or double-counting contributions linked to the Lagrange
 * multiplier, such as the virial and free-energy derivatives.
 *
 * \note When \p dd = nullptr, an empty reference is returned.
 */
gmx::ArrayRef<const int> dd_constraints_nlocalatoms(const gmx_domdec_t *dd);
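
/* Illustrative usage sketch (not from the GROMACS sources): weight each
 * per-constraint contribution by half its local atom count, so that summing
 * over all domains counts every constraint exactly once (2 -> 1, 1 -> 0.5,
 * 0 -> 0); "contribution" is a hypothetical per-constraint quantity.
 *
 *   gmx::ArrayRef<const int> nlocal = dd_constraints_nlocalatoms(dd);
 *   double                   sum    = 0;
 *   for (size_t c = 0; c < nlocal.size(); c++)
 *   {
 *       sum += 0.5*nlocal[c]*contribution[c];
 *   }
 */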

/* In domdec_top.c */

/*! \brief Print error output when interactions are missing */
[[noreturn]]
void dd_print_missing_interactions(FILE *fplog, struct t_commrec *cr,
                                   int local_count,
                                   const gmx_mtop_t *top_global,
                                   const gmx_localtop_t *top_local,
                                   const t_state *state_local);

/*! \brief Generate and store the reverse topology */
void dd_make_reverse_top(FILE *fplog,
                         gmx_domdec_t *dd, const gmx_mtop_t *mtop,
                         const gmx_vsite_t *vsite,
                         const t_inputrec *ir, gmx_bool bBCheck);

/*! \brief Store the local charge group index in \p lcgs */
void dd_make_local_cgs(struct gmx_domdec_t *dd, t_block *lcgs);

/*! \brief Generate the local topology and virtual site data */
void dd_make_local_top(struct gmx_domdec_t *dd, struct gmx_domdec_zones_t *zones,
                       int npbcdim, matrix box,
                       rvec cellsize_min, const ivec npulse,
                       t_forcerec *fr,
                       rvec *cgcm_or_x,
                       gmx_vsite_t *vsite,
                       const gmx_mtop_t *top, gmx_localtop_t *ltop);

/*! \brief Sort ltop->ilist when we are doing free energy. */
void dd_sort_local_top(gmx_domdec_t *dd, const t_mdatoms *mdatoms,
                       gmx_localtop_t *ltop);

/*! \brief Construct local topology */
gmx_localtop_t *dd_init_local_top(const gmx_mtop_t *top_global);

/*! \brief Construct local state */
void dd_init_local_state(struct gmx_domdec_t *dd,
                         t_state *state_global, t_state *local_state);

/*! \brief Generate a list of links between charge groups that are linked by bonded interactions */
t_blocka *make_charge_group_links(const gmx_mtop_t *mtop, gmx_domdec_t *dd,
                                  cginfo_mb_t *cginfo_mb);

/*! \brief Calculate the maximum distance involved in 2-body and multi-body bonded interactions */
void dd_bonded_cg_distance(FILE *fplog, const gmx_mtop_t *mtop,
                           const t_inputrec *ir,
                           const rvec *x, const matrix box,
                           gmx_bool bBCheck,
                           real *r_2b, real *r_mb);

/*! \brief Dump a pdb file with the current DD home + communicated atoms.
 *
 * When natoms=-1, dump all known atoms.
 */
void write_dd_pdb(const char *fn, gmx_int64_t step, const char *title,
                  const gmx_mtop_t *mtop,
                  const t_commrec *cr,
                  int natoms, const rvec x[], const matrix box);

/* In domdec_setup.c */

/*! \brief Returns the volume fraction of the system that is communicated */
real comm_box_frac(const ivec dd_nc, real cutoff, const gmx_ddbox_t *ddbox);

/*! \brief Determines the optimal DD cell setup dd->nc and possibly npmenodes
 * for the system.
 *
 * On the master node returns the actual cellsize limit used.
 */
real dd_choose_grid(FILE *fplog,
                    t_commrec *cr, gmx_domdec_t *dd,
                    const t_inputrec *ir,
                    const gmx_mtop_t *mtop,
                    const matrix box, const gmx_ddbox_t *ddbox,
                    int nPmeRanks,
                    gmx_bool bDynLoadBal, real dlb_scale,
                    real cellsize_limit, real cutoff_dd,
                    gmx_bool bInterCGBondeds);

/* In domdec_box.c */

/*! \brief Set the box and PBC data in \p ddbox */
void set_ddbox(gmx_domdec_t *dd, bool masterRankHasTheSystemState,
               const t_inputrec *ir, const matrix box,
               bool calculateUnboundedSize,
               gmx::ArrayRef<const gmx::RVec> x,
               gmx_ddbox_t *ddbox);

/*! \brief Set the box and PBC data in \p ddbox, without requiring a DD struct */
void set_ddbox_cr(const t_commrec *cr, const ivec *dd_nc,
                  const t_inputrec *ir, const matrix box,
                  gmx::ArrayRef<const gmx::RVec> x,
                  gmx_ddbox_t *ddbox);

#endif