PISM, A Parallel Ice Sheet Model  stable v2.0.6 committed by Constantine Khrulev on 2023-01-23 15:14:38 -0900
iceModelVec.cc
1 // Copyright (C) 2008--2022 Ed Bueler, Constantine Khroulev, and David Maxwell
2 //
3 // This file is part of PISM.
4 //
5 // PISM is free software; you can redistribute it and/or modify it under the
6 // terms of the GNU General Public License as published by the Free Software
7 // Foundation; either version 3 of the License, or (at your option) any later
8 // version.
9 //
10 // PISM is distributed in the hope that it will be useful, but WITHOUT ANY
11 // WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
12 // FOR A PARTICULAR PURPOSE. See the GNU General Public License for more
13 // details.
14 //
15 // You should have received a copy of the GNU General Public License
16 // along with PISM; if not, write to the Free Software
17 // Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA
18 
19 #include <cassert>
20 
21 #include <petscdraw.h>
22 
23 #include "iceModelVec.hh"
24 #include "pism/util/IceModelVec2V.hh"
25 #include "pism/util/IceModelVec_impl.hh"
26 
27 #include "Time.hh"
28 #include "IceGrid.hh"
29 #include "ConfigInterface.hh"
30 
31 #include "error_handling.hh"
32 #include "io/io_helpers.hh"
33 #include "pism/util/Logger.hh"
34 #include "pism/util/Profiling.hh"
35 #include "pism/util/petscwrappers/VecScatter.hh"
36 #include "pism/util/petscwrappers/Viewer.hh"
37 #include "pism/util/Mask.hh"
38 #include "pism/util/IceModelVec2CellType.hh"
39 #include "pism/util/Context.hh"
40 #include "pism/util/VariableMetadata.hh"
41 #include "pism/util/io/File.hh"
42 #include "pism_utilities.hh"
43 
44 namespace pism {
45 
46 static void global_to_local(petsc::DM &dm, Vec source, Vec destination) {
47  PetscErrorCode ierr;
48 
49  ierr = DMGlobalToLocalBegin(dm, source, INSERT_VALUES, destination);
50  PISM_CHK(ierr, "DMGlobalToLocalBegin");
51 
52  ierr = DMGlobalToLocalEnd(dm, source, INSERT_VALUES, destination);
53  PISM_CHK(ierr, "DMGlobalToLocalEnd");
54 }
55 
56 IceModelVec::IceModelVec(IceGrid::ConstPtr grid,
57  const std::string &name,
58  IceModelVecKind ghostedp,
59  size_t dof,
60  size_t stencil_width,
61  const std::vector<double> &zlevels) {
62  m_impl = new Impl();
63  m_array = nullptr;
64 
65  m_impl->name = name;
66  m_impl->grid = grid;
67  m_impl->ghosted = (ghostedp == WITH_GHOSTS);
68  m_impl->dof = dof;
69  m_impl->zlevels = zlevels;
70 
71  auto max_stencil_width = grid->ctx()->config()->get_number("grid.max_stencil_width");
72  if ((dof != 1) or (stencil_width > max_stencil_width)) {
73  // use the requested stencil width *if* we have to
74  m_impl->da_stencil_width = stencil_width;
75  } else {
76  // otherwise use the "standard" stencil width
77  m_impl->da_stencil_width = max_stencil_width;
78  }
79 
80  auto system = m_impl->grid->ctx()->unit_system();
81  if (m_impl->dof > 1) {
82  // dof > 1: this is a 2D vector
83  using pism::printf;
84  for (unsigned int j = 0; j < m_impl->dof; ++j) {
85  m_impl->metadata.push_back({system, printf("%s[%d]", name.c_str(), j)});
86  }
87  } else {
88  // both 2D and 3D vectors
89  m_impl->metadata = {{system, name, zlevels}};
90  }
91 
92  if (zlevels.size() > 1) {
93  m_impl->bsearch_accel = gsl_interp_accel_alloc();
94  if (m_impl->bsearch_accel == NULL) {
95  throw RuntimeError(PISM_ERROR_LOCATION,
96  "Failed to allocate a GSL interpolation accelerator");
97  }
98  }
99 }
100 
101 IceModelVec::~IceModelVec() {
102  assert(m_impl->access_counter == 0);
103 
104  if (m_impl->bsearch_accel != nullptr) {
105  gsl_interp_accel_free(m_impl->bsearch_accel);
106  m_impl->bsearch_accel = nullptr;
107  }
108 
109  delete m_impl;
110  m_impl = nullptr;
111 }
112 
113 //! \brief Get the object state counter.
114 /*!
115  * This method returns the "revision number" of an IceModelVec.
116  *
117  * It can be used to determine if a field was updated and if a certain
118  * computation needs to be re-done. One example is computing the smoothed bed
119  * for the SIA computation, which is only necessary if the bed deformation code
120  * fired.
121  *
122  * See also inc_state_counter().
123  */
124 int IceModelVec::state_counter() const {
125  return m_impl->state_counter;
126 }
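// [Editorial addition, not part of iceModelVec.cc] Illustrative sketch of how the
// state counter can be used to avoid redundant work; the names "bed" and
// "m_last_bed_revision" are hypothetical:
//
//   if (bed.state_counter() != m_last_bed_revision) {
//     // the bed changed since we last looked: redo the expensive computation
//     recompute_smoothed_bed();
//     m_last_bed_revision = bed.state_counter();
//   }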
127 
128 IceGrid::ConstPtr IceModelVec::grid() const {
129  return m_impl->grid;
130 }
131 
132 unsigned int IceModelVec::ndof() const {
133  return m_impl->dof;
134 }
135 
136 std::vector<double> IceModelVec::levels() const {
137  return m_impl->zlevels;
138 }
139 
140 //! \brief Increment the object state counter.
141 /*!
142  * See the documentation of get_state_counter(). This method is the
143  * *only* way to manually increment the state counter. It is also
144  * automatically updated by IceModelVec methods that are known to
145  * change stored values.
146  */
147 void IceModelVec::inc_state_counter() {
148  m_impl->state_counter++;
149 }
150 
151 //! Returns the number of spatial dimensions.
152 unsigned int IceModelVec::ndims() const {
153  return m_impl->zlevels.size() > 1 ? 3 : 2;
154 }
155 
156 std::vector<int> IceModelVec::shape() const {
157 
158  auto grid = m_impl->grid;
159 
160  if (ndims() == 3) {
161  return {(int)grid->My(), (int)grid->Mx(), (int)levels().size()};
162  }
163 
164  if (ndof() == 1) {
165  return {(int)grid->My(), (int)grid->Mx()};
166  }
167 
168  return {(int)grid->My(), (int)grid->Mx(), (int)ndof()};
169 }
170 
171 //! Set the time independent flag for all variables corresponding to this IceModelVec instance.
172 /** A "time independent" IceModelVec will be saved to a NetCDF
173  variable which does not depend on the "time" dimension.
174  */
175 void IceModelVec::set_time_independent(bool flag) {
176  for (unsigned int j = 0; j < m_impl->dof; ++j) {
177  m_impl->metadata[j].set_time_independent(flag);
178  }
179 }
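// [Editorial addition, not part of iceModelVec.cc] Minimal usage sketch; "topg" is
// a hypothetical bed-topography field that should be written without a "time"
// dimension:
//
//   topg.set_time_independent(true);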
180 
181 void IceModelVec::set_begin_access_use_dof(bool flag) {
182  m_impl->begin_access_use_dof = flag;
183 }
184 
185 
186 //! Result: min <- min(v[j]), max <- max(v[j]).
187 /*!
188 PETSc manual correctly says "VecMin and VecMax are collective on Vec" but
189 GlobalMax,GlobalMin \e are needed, when m_impl->ghosted==true, to get correct
190 values because Vecs created with DACreateLocalVector() are of type
191 VECSEQ and not VECMPI. See src/trypetsc/localVecMax.c.
192  */
193 std::array<double,2> IceModelVec::range() const {
194  PetscErrorCode ierr;
195 
196  double min{0.0};
197  ierr = VecMin(vec(), NULL, &min);
198  PISM_CHK(ierr, "VecMin");
199 
200  double max{0.0};
201  ierr = VecMax(vec(), NULL, &max);
202  PISM_CHK(ierr, "VecMax");
203 
204  if (m_impl->ghosted) {
205  // needs a reduce operation; use GlobalMin and GlobalMax;
206  min = GlobalMin(m_impl->grid->com, min);
207  max = GlobalMax(m_impl->grid->com, max);
208  }
209  return {min, max};
210 }
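// [Editorial addition, not part of iceModelVec.cc] Usage sketch; "thickness" is a
// hypothetical IceModelVec2S. The result is a global (parallel) minimum and maximum:
//
//   auto r = thickness.range();
//   double H_min = r[0], H_max = r[1];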
211 
212 /** Convert from `int` to PETSc's `NormType`.
213  *
214  *
215  * @param[in] input norm type as an integer
216  *
217  * @return norm type as PETSc's `NormType`.
218  */
219 static NormType int_to_normtype(int input) {
220  switch (input) {
221  case NORM_1:
222  return NORM_1;
223  case NORM_2:
224  return NORM_2;
225  case NORM_INFINITY:
226  return NORM_INFINITY;
227  default:
228  throw RuntimeError::formatted(PISM_ERROR_LOCATION, "invalid norm type: %d",
229  input);
230  }
231 }
232 
233 //! Result: v <- v + alpha * x. Calls VecAXPY.
234 void IceModelVec::add(double alpha, const IceModelVec &x) {
235  checkCompatibility("add", x);
236 
237  PetscErrorCode ierr = VecAXPY(vec(), alpha, x.vec());
238  PISM_CHK(ierr, "VecAXPY");
239 
240  inc_state_counter(); // mark as modified
241 }
242 
243 //! Result: v[j] <- v[j] + alpha for all j. Calls VecShift.
244 void IceModelVec::shift(double alpha) {
245  PetscErrorCode ierr = VecShift(vec(), alpha);
246  PISM_CHK(ierr, "VecShift");
247 
248  inc_state_counter(); // mark as modified
249 }
250 
251 //! Result: v <- v * alpha. Calls VecScale.
252 void IceModelVec::scale(double alpha) {
253  PetscErrorCode ierr = VecScale(vec(), alpha);
254  PISM_CHK(ierr, "VecScale");
255 
256  inc_state_counter(); // mark as modified
257 }
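// [Editorial addition, not part of iceModelVec.cc] Sketch combining the three
// point-wise updates above; "u" and "du" are hypothetical fields with identical
// layouts (checkCompatibility() would throw otherwise):
//
//   u.add(0.5, du);    // u <- u + 0.5 * du
//   u.shift(1.0);      // u <- u + 1
//   u.scale(2.0);      // u <- 2 * u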
258 
259 //! Copies v to a global vector 'destination'. Ghost points are discarded.
260 /*! This is potentially dangerous: make sure that `destination` has the same
261  dimensions as the current IceModelVec.
262 
263  DMLocalToGlobalBegin/End is broken in PETSc 3.5, so we roll our
264  own.
265  */
266 void IceModelVec::copy_to_vec(std::shared_ptr<petsc::DM> destination_da,
267  petsc::Vec &destination) const {
268  // m_dof > 1 for vector, staggered grid 2D fields, etc. In this case
269  // zlevels.size() == 1. For 3D fields, m_dof == 1 (all 3D fields are
270  // scalar) and zlevels.size() corresponds to dof of the underlying PETSc
271  // DM object. So we want the bigger of the two numbers here.
272  unsigned int N = std::max((size_t)m_impl->dof, m_impl->zlevels.size());
273 
274  this->get_dof(destination_da, destination, 0, N);
275 }
276 
277 void IceModelVec::get_dof(std::shared_ptr<petsc::DM> da_result,
278  petsc::Vec &result,
279  unsigned int start, unsigned int count) const {
280  if (start >= m_impl->dof) {
281  throw RuntimeError::formatted(PISM_ERROR_LOCATION, "invalid argument (start); got %d", start);
282  }
283 
284  petsc::DMDAVecArrayDOF tmp_res(da_result, result), tmp_v(dm(), vec());
285 
286  double
287  ***result_a = static_cast<double***>(tmp_res.get()),
288  ***source_a = static_cast<double***>(tmp_v.get());
289 
290  ParallelSection loop(m_impl->grid->com);
291  try {
292  for (Points p(*m_impl->grid); p; p.next()) {
293  const int i = p.i(), j = p.j();
294  PetscErrorCode ierr = PetscMemcpy(result_a[j][i], &source_a[j][i][start],
295  count*sizeof(PetscScalar));
296  PISM_CHK(ierr, "PetscMemcpy");
297  }
298  } catch (...) {
299  loop.failed();
300  }
301  loop.check();
302 }
303 
304 void IceModelVec::set_dof(std::shared_ptr<petsc::DM> da_source, petsc::Vec &source,
305  unsigned int start, unsigned int count) {
306  if (start >= m_impl->dof) {
307  throw RuntimeError::formatted(PISM_ERROR_LOCATION, "invalid argument (start); got %d", start);
308  }
309 
310  petsc::DMDAVecArrayDOF tmp_src(da_source, source), tmp_v(dm(), vec());
311 
312  double
313  ***source_a = static_cast<double***>(tmp_src.get()),
314  ***result_a = static_cast<double***>(tmp_v.get());
315 
316  ParallelSection loop(m_impl->grid->com);
317  try {
318  for (Points p(*m_impl->grid); p; p.next()) {
319  const int i = p.i(), j = p.j();
320  PetscErrorCode ierr = PetscMemcpy(&result_a[j][i][start], source_a[j][i],
321  count*sizeof(PetscScalar));
322  PISM_CHK(ierr, "PetscMemcpy");
323  }
324  } catch (...) {
325  loop.failed();
326  }
327  loop.check();
328 
329  inc_state_counter(); // mark as modified
330 }
331 
332 //! @brief Get the stencil width of the current IceModelVec. Returns 0
333 //! if ghosts are not available.
334 unsigned int IceModelVec::stencil_width() const {
335  if (m_impl->ghosted) {
336  return m_impl->da_stencil_width;
337  }
338 
339  return 0;
340 }
341 
342 petsc::Vec& IceModelVec::vec() const {
343  if (m_impl->v.get() == nullptr) {
344  PetscErrorCode ierr = 0;
345  if (m_impl->ghosted) {
346  ierr = DMCreateLocalVector(*dm(), m_impl->v.rawptr());
347  PISM_CHK(ierr, "DMCreateLocalVector");
348  } else {
349  ierr = DMCreateGlobalVector(*dm(), m_impl->v.rawptr());
350  PISM_CHK(ierr, "DMCreateGlobalVector");
351  }
352  }
353  return m_impl->v;
354 }
355 
356 std::shared_ptr<petsc::DM> IceModelVec::dm() const {
357  if (m_impl->da == nullptr) {
358  // dof > 1 for vector, staggered grid 2D fields, etc. In this case zlevels.size() ==
359  // 1. For 3D fields, dof == 1 (all 3D fields are scalar) and zlevels.size()
360  // corresponds to dof of the underlying PETSc DM object.
361  auto da_dof = std::max(m_impl->zlevels.size(), (size_t)m_impl->dof);
362 
363  // initialize the da member:
364  m_impl->da = grid()->get_dm(da_dof, m_impl->da_stencil_width);
365  }
366  return m_impl->da;
367 }
368 
369 //! Sets the variable name to `name`.
370 /**
371  * This is the "overall" name of a field. This is **not** the same as the
372  * NetCDF variable name. Use `metadata(...).set_name(...)` to set that.
373  */
374 void IceModelVec::set_name(const std::string &name) {
375  m_impl->name = name;
376 }
377 
378 //! @brief Get the name of an IceModelVec object.
379 /**
380  * This is the name used to refer to this object in PISM (e.g. via the
381  * Vars class), **not** the name of the corresponding NetCDF variable.
382  * (The problem is that one IceModelVec instance may correspond to
383  * several NetCDF variables. Use metadata(...).get_name() to get the
384  * name of NetCDF variables an IceModelVec is saved to.)
385  */
386 const std::string& IceModelVec::get_name() const {
387  return m_impl->name;
388 }
389 
390 //! Sets NetCDF attributes of an IceModelVec object.
391 /*! Call set_attrs("new pism_intent", "new long name", "new units", "") if a
392  variable does not have a standard name. Similarly, by putting "" in an
393  appropriate spot, it is possible to leave long_name, units or pism_intent
394  unmodified.
395 
396  If units != "", this also resets glaciological_units, so that they match
397  internal units.
398  */
399 void IceModelVec::set_attrs(const std::string &pism_intent,
400  const std::string &long_name,
401  const std::string &units,
402  const std::string &glaciological_units,
403  const std::string &standard_name,
404  unsigned int component) {
405 
406  metadata(component)["long_name"] = long_name;
407 
408  metadata(component)["units"] = units;
409 
410  if (not m_impl->grid->ctx()->config()->get_flag("output.use_MKS")) {
411  metadata(component)["glaciological_units"] = glaciological_units;
412  }
413 
414  metadata(component)["pism_intent"] = pism_intent;
415 
416  metadata(component)["standard_name"] = standard_name;
417 }
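// [Editorial addition, not part of iceModelVec.cc] Sketch of a typical set_attrs()
// call based on the signature above; the metadata strings and the field name
// "ice_thickness" are hypothetical:
//
//   ice_thickness.set_attrs("model_state",        // pism_intent
//                           "land ice thickness", // long_name
//                           "m",                  // units
//                           "m",                  // glaciological_units
//                           "land_ice_thickness", // standard_name
//                           0);                   // component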
418 
419 //! Gets an IceModelVec from a file `file`, interpolating onto the current grid.
420 /*! Stops if the variable was not found and `critical` == true.
421  */
422 void IceModelVec::regrid_impl(const File &file, RegriddingFlag flag,
423  double default_value) {
424 
425  bool allow_extrapolation = grid()->ctx()->config()->get_flag("grid.allow_extrapolation");
426 
427  if (ndof() == 1) {
428  if (m_impl->ghosted) {
429  petsc::TemporaryGlobalVec tmp(dm());
430  petsc::VecArray tmp_array(tmp);
431 
432  io::regrid_spatial_variable(metadata(0), *grid(), file, flag,
433  m_impl->report_range, allow_extrapolation,
434  default_value, m_impl->interpolation_type,
435  tmp_array.get());
436 
437  global_to_local(*dm(), tmp, vec());
438  } else {
439  petsc::VecArray v_array(vec());
440  io::regrid_spatial_variable(metadata(0), *grid(), file, flag,
441  m_impl->report_range, allow_extrapolation,
442  default_value, m_impl->interpolation_type,
443  v_array.get());
444  }
445  return;
446  }
447 
448  // Get the dof=1, stencil_width=0 DMDA (components are always scalar
449  // and we just need a global Vec):
450  auto da2 = grid()->get_dm(1, 0);
451 
452  // a temporary one-component vector, distributed across processors
453  // the same way v is
454  petsc::TemporaryGlobalVec tmp(da2);
455 
456  for (unsigned int j = 0; j < ndof(); ++j) {
457  {
458  petsc::VecArray tmp_array(tmp);
459  io::regrid_spatial_variable(metadata(j), *grid(), file, flag,
460  m_impl->report_range, allow_extrapolation,
461  default_value, m_impl->interpolation_type,
462  tmp_array.get());
463  }
464 
465  set_dof(da2, tmp, j);
466  }
467 
468  // The calls above only set the values owned by a processor, so we need to
469  // communicate if m_has_ghosts == true:
470  if (m_impl->ghosted) {
471  update_ghosts();
472  }
473 }
474 
475 //! Reads appropriate NetCDF variable(s) into an IceModelVec.
476 void IceModelVec::read_impl(const File &file, const unsigned int time) {
477 
478  Logger::ConstPtr log = grid()->ctx()->log();
479  log->message(4, " Reading %s...\n", m_impl->name.c_str());
480 
481  if (ndof() == 1) {
482  // This takes care of scalar variables (both 2D and 3D).
483  if (m_impl->ghosted) {
484  petsc::TemporaryGlobalVec tmp(dm());
485 
486  petsc::VecArray tmp_array(tmp);
487  io::read_spatial_variable(metadata(0), *grid(), file, time, tmp_array.get());
488 
489  global_to_local(*dm(), tmp, vec());
490  } else {
491  petsc::VecArray v_array(vec());
492  io::read_spatial_variable(metadata(0), *grid(), file, time, v_array.get());
493  }
494  return;
495  }
496 
497  // Get the dof=1, stencil_width=0 DMDA (components are always scalar
498  // and we just need a global Vec):
499  auto da2 = grid()->get_dm(1, 0);
500 
501  // A temporary one-component vector, distributed across processors
502  // the same way v is
503  petsc::TemporaryGlobalVec tmp(da2);
504 
505  for (unsigned int j = 0; j < ndof(); ++j) {
506 
507  {
508  petsc::VecArray tmp_array(tmp);
509  io::read_spatial_variable(metadata(j), *grid(), file, time, tmp_array.get());
510  }
511 
512  set_dof(da2, tmp, j);
513  }
514 
515  // The calls above only set the values owned by a processor, so we need to
516  // communicate if m_impl->ghosted is true:
517  if (m_impl->ghosted) {
518  update_ghosts();
519  }
520 }
521 
522 //! \brief Define variables corresponding to an IceModelVec in a file opened using `file`.
523 void IceModelVec::define(const File &file, IO_Type default_type) const {
524  for (unsigned int j = 0; j < ndof(); ++j) {
525  IO_Type type = metadata(j).get_output_type();
526  type = type == PISM_NAT ? default_type : type;
527  io::define_spatial_variable(metadata(j), *m_impl->grid, file, type);
528  }
529 }
530 
531 //! @brief Returns a reference to the SpatialVariableMetadata object
532 //! containing metadata for the component N.
533 SpatialVariableMetadata& IceModelVec::metadata(unsigned int N) {
534  assert(N < m_impl->dof);
535  return m_impl->metadata[N];
536 }
537 
538 const SpatialVariableMetadata& IceModelVec::metadata(unsigned int N) const {
539  assert(N < m_impl->dof);
540  return m_impl->metadata[N];
541 }
542 
543 //! Writes an IceModelVec to a NetCDF file.
544 void IceModelVec::write_impl(const File &file) const {
545  Logger::ConstPtr log = m_impl->grid->ctx()->log();
546 
547  log->message(4, " Writing %s...\n", m_impl->name.c_str());
548 
549  // The simplest case:
550  if (ndof() == 1) {
551  if (m_impl->ghosted) {
552  petsc::TemporaryGlobalVec tmp(dm());
553 
554  this->copy_to_vec(dm(), tmp);
555 
556  petsc::VecArray tmp_array(tmp);
557 
558  io::write_spatial_variable(metadata(0), *grid(), file, tmp_array.get());
559  } else {
560  petsc::VecArray v_array(vec());
561  io::write_spatial_variable(metadata(0), *grid(), file, v_array.get());
562  }
563  return;
564  }
565 
566  // Get the dof=1, stencil_width=0 DMDA (components are always scalar
567  // and we just need a global Vec):
568  auto da2 = m_impl->grid->get_dm(1, 0);
569 
570  // a temporary one-component vector, distributed across processors
571  // the same way v is
572  petsc::TemporaryGlobalVec tmp(da2);
573 
574  for (unsigned int j = 0; j < ndof(); ++j) {
575  get_dof(da2, tmp, j);
576 
577  petsc::VecArray tmp_array(tmp);
578  io::write_spatial_variable(metadata(j), *grid(), file, tmp_array.get());
579  }
580 }
581 
582 //! Dumps a variable to a file, overwriting this file's contents (for debugging).
583 void IceModelVec::dump(const char filename[]) const {
584  File file(m_impl->grid->com, filename,
585  string_to_backend(m_impl->grid->ctx()->config()->get_string("output.format")),
586  PISM_READWRITE_CLOBBER,
587  m_impl->grid->ctx()->pio_iosys_id());
588 
589  if (not m_impl->metadata[0].get_time_independent()) {
590  io::define_time(file, *m_impl->grid->ctx());
591  io::append_time(file, *m_impl->grid->ctx()->config(), m_impl->grid->ctx()->time()->current());
592  }
593 
594  define(file, PISM_DOUBLE);
595  write(file);
596 }
597 
598 //! Checks if two IceModelVecs have compatible sizes, dimensions and numbers of degrees of freedom.
599 void IceModelVec::checkCompatibility(const char* func, const IceModelVec &other) const {
600  PetscErrorCode ierr;
601  // We have to use PetscInt because of VecGetSizes below.
602  PetscInt X_size, Y_size;
603 
604  if (m_impl->dof != other.m_impl->dof) {
605  throw RuntimeError::formatted(PISM_ERROR_LOCATION, "IceModelVec::%s(...): operands have different numbers of degrees of freedom",
606  func);
607  }
608 
609  ierr = VecGetSize(vec(), &X_size);
610  PISM_CHK(ierr, "VecGetSize");
611 
612  ierr = VecGetSize(other.vec(), &Y_size);
613  PISM_CHK(ierr, "VecGetSize");
614 
615  if (X_size != Y_size) {
616  throw RuntimeError::formatted(PISM_ERROR_LOCATION, "IceModelVec::%s(...): incompatible Vec sizes (called as %s.%s(%s))",
617  func, m_impl->name.c_str(), func, other.m_impl->name.c_str());
618  }
619 }
620 
621 //! Checks if an IceModelVec is allocated and calls DAVecGetArray.
622 void IceModelVec::begin_access() const {
623 
624  if (m_impl->access_counter < 0) {
625  throw RuntimeError(PISM_ERROR_LOCATION, "IceModelVec::begin_access(): m_access_counter < 0");
626  }
627 
628  if (m_impl->access_counter == 0) {
629  PetscErrorCode ierr;
630  if (m_impl->begin_access_use_dof) {
631  ierr = DMDAVecGetArrayDOF(*dm(), vec(), &m_array);
632  PISM_CHK(ierr, "DMDAVecGetArrayDOF");
633  } else {
634  ierr = DMDAVecGetArray(*dm(), vec(), &m_array);
635  PISM_CHK(ierr, "DMDAVecGetArray");
636  }
637  }
638 
639  m_impl->access_counter++;
640 }
641 
642 //! Checks if an IceModelVec is allocated and calls DAVecRestoreArray.
643 void IceModelVec::end_access() const {
644  PetscErrorCode ierr;
645 
646  if (m_array == NULL) {
647  throw RuntimeError(PISM_ERROR_LOCATION,
648  "IceModelVec::end_access(): a == NULL (looks like begin_access() was not called)");
649  }
650 
651  if (m_impl->access_counter < 0) {
652  throw RuntimeError(PISM_ERROR_LOCATION, "IceModelVec::end_access(): m_access_counter < 0");
653  }
654 
655  m_impl->access_counter--;
656  if (m_impl->access_counter == 0) {
657  if (m_impl->begin_access_use_dof) {
658  ierr = DMDAVecRestoreArrayDOF(*dm(), vec(), &m_array);
659  PISM_CHK(ierr, "DMDAVecRestoreArrayDOF");
660  } else {
661  ierr = DMDAVecRestoreArray(*dm(), vec(), &m_array);
662  PISM_CHK(ierr, "DMDAVecRestoreArray");
663  }
664  m_array = NULL;
665  }
666 }
667 
668 //! Updates ghost points.
669 void IceModelVec::update_ghosts() {
670  PetscErrorCode ierr;
671  if (not m_impl->ghosted) {
672  return;
673  }
674 
675  ierr = DMLocalToLocalBegin(*dm(), vec(), INSERT_VALUES, vec());
676  PISM_CHK(ierr, "DMLocalToLocalBegin");
677 
678  ierr = DMLocalToLocalEnd(*dm(), vec(), INSERT_VALUES, vec());
679  PISM_CHK(ierr, "DMLocalToLocalEnd");
680 }
681 
682 //! Result: v[j] <- c for all j.
683 void IceModelVec::set(const double c) {
684  PetscErrorCode ierr = VecSet(vec(),c);
685  PISM_CHK(ierr, "VecSet");
686 
687  inc_state_counter(); // mark as modified
688 }
689 
690 void IceModelVec::check_array_indices(int i, int j, unsigned int k) const {
691  double ghost_width = 0;
692  if (m_impl->ghosted) {
693  ghost_width = m_impl->da_stencil_width;
694  }
695  // m_impl->dof > 1 for vector, staggered grid 2D fields, etc. In this case
696  // zlevels.size() == 1. For 3D fields, m_impl->dof == 1 (all 3D fields are
697  // scalar) and zlevels.size() corresponds to dof of the underlying PETSc
698  // DM object. So we want the bigger of the two numbers here.
699  unsigned int N = std::max((size_t)m_impl->dof, m_impl->zlevels.size());
700 
701  bool out_of_range = (i < m_impl->grid->xs() - ghost_width) ||
702  (i > m_impl->grid->xs() + m_impl->grid->xm() + ghost_width) ||
703  (j < m_impl->grid->ys() - ghost_width) ||
704  (j > m_impl->grid->ys() + m_impl->grid->ym() + ghost_width) ||
705  (k >= N);
706 
707  if (out_of_range) {
708  throw RuntimeError::formatted(PISM_ERROR_LOCATION, "%s(%d, %d, %d) is out of bounds",
709  m_impl->name.c_str(), i, j, k);
710  }
711 
712  if (m_array == NULL) {
713  throw RuntimeError::formatted(PISM_ERROR_LOCATION, "%s: begin_access() was not called", m_impl->name.c_str());
714  }
715 }
716 
717 namespace vec {
718 namespace details {
719 //! \brief Compute parameters for 2D loop computations involving 3
720 //! IceModelVecs.
721 /*!
722  * Here we assume that z is updated using a local (point-wise) computation
723  * involving x and y.
724  *
725  * "ghosts" is the width of the stencil that can be updated locally.
726  * "scatter" is false if all ghosts can be updated locally.
727  */
728 void compute_params(const IceModelVec* const x, const IceModelVec* const y,
729  const IceModelVec* const z, int &ghosts, bool &scatter) {
730 
731  // We have 2^3=8 cases here (x,y,z having or not having ghosts).
732  if (z->stencil_width() == 0) {
733  // z has no ghosts; we can update everything locally
734  // (This covers 4 cases.)
735  ghosts = 0;
736  scatter = false;
737  } else if (x->stencil_width() == 0 ||
738  y->stencil_width() == 0) {
739  // z has ghosts, but at least one of x and y does not. we have to scatter
740  // ghosts.
741  // (This covers 3 cases.)
742  ghosts = 0;
743  scatter = true;
744  } else {
745  // all of x, y, z have ghosts
746  // (The remaining 8-th case.)
747  if (z->stencil_width() <= x->stencil_width() &&
748  z->stencil_width() <= y->stencil_width()) {
749  // x and y have enough ghosts to update ghosts of z locally
750  ghosts = z->stencil_width();
751  scatter = false;
752  } else {
753  // z has ghosts, but at least one of x and y doesn't have a wide enough
754  // stencil
755  ghosts = 0;
756  scatter = true;
757  }
758  }
759 }
760 
761 } // end of namespace details
762 } // end of namespace vec
763 
764 //! Computes the norm of all the components of an IceModelVec.
765 /*!
766 See comment for range(); because local Vecs are VECSEQ, needs a reduce operation.
767 See src/trypetsc/localVecMax.c.
768  */
769 std::vector<double> IceModelVec::norm(int n) const {
770  std::vector<double> result(m_impl->dof);
771 
772  NormType type = int_to_normtype(n);
773 
774  if (m_impl->dof > 1) {
775  PetscErrorCode ierr = VecStrideNormAll(vec(), type, &result[0]);
776  PISM_CHK(ierr, "VecStrideNormAll");
777  } else {
778  PetscErrorCode ierr = VecNorm(vec(), type, &result[0]);
779  PISM_CHK(ierr, "VecNorm");
780  }
781 
782  if (m_impl->ghosted) {
783  // needs a reduce operation; use GlobalMax() if NORM_INFINITY,
784  // otherwise GlobalSum; handle the NORM_2 case carefully
785  switch (type) {
786  case NORM_1_AND_2: {
787  throw RuntimeError::formatted(PISM_ERROR_LOCATION, "IceModelVec::norm_all(...): NORM_1_AND_2"
788  " not implemented (called as %s.norm_all(...))",
789  m_impl->name.c_str());
790 
791  }
792  case NORM_1: {
793  for (unsigned int k = 0; k < m_impl->dof; ++k) {
794  result[k] = GlobalSum(m_impl->grid->com, result[k]);
795  }
796  return result;
797  }
798  case NORM_2: {
799  for (unsigned int k = 0; k < m_impl->dof; ++k) {
800  // undo sqrt in VecNorm before sum; sum up; take sqrt
801  result[k] = sqrt(GlobalSum(m_impl->grid->com, result[k]*result[k]));
802  }
803  return result;
804  }
805  case NORM_INFINITY: {
806  for (unsigned int k = 0; k < m_impl->dof; ++k) {
807  result[k] = GlobalMax(m_impl->grid->com, result[k]);
808  }
809  return result;
810  }
811  default: {
812  throw RuntimeError::formatted(PISM_ERROR_LOCATION, "IceModelVec::norm_all(...): unknown norm type"
813  " (called as %s.norm_all(...))",
814  m_impl->name.c_str());
815  }
816  } // switch
817  } else {
818  return result;
819  }
820 }
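// [Editorial addition, not part of iceModelVec.cc] Usage sketch; "velocity" is a
// hypothetical two-component (dof == 2) field, so norm() returns one value per
// component:
//
//   std::vector<double> v_inf = velocity.norm(NORM_INFINITY);
//   // v_inf[0] and v_inf[1] are the maximum absolute values of the two components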
821 
822 void IceModelVec::write(const std::string &filename) const {
823  // We expect the file to be present and ready to write into.
824  File file(m_impl->grid->com,
825  filename,
826  string_to_backend(m_impl->grid->ctx()->config()->get_string("output.format")),
827  PISM_READWRITE,
828  m_impl->grid->ctx()->pio_iosys_id());
829 
830  this->write(file);
831 }
832 
833 void IceModelVec::read(const std::string &filename, unsigned int time) {
834  File file(m_impl->grid->com, filename, PISM_GUESS, PISM_READONLY);
835  this->read(file, time);
836 }
837 
838 void IceModelVec::regrid(const std::string &filename, RegriddingFlag flag,
839  double default_value) {
840  File file(m_impl->grid->com, filename, PISM_GUESS, PISM_READONLY);
841 
842  try {
843  this->regrid(file, flag, default_value);
844  } catch (RuntimeError &e) {
845  e.add_context("regridding '%s' from '%s'",
846  this->get_name().c_str(), filename.c_str());
847  throw;
848  }
849 }
850 
851 /** Read a field from a file, interpolating onto the current grid.
852  *
853  * When `flag` is set to `CRITICAL`, stop if could not find the variable
854  * in the provided input file; `default_value` is ignored.
855  *
856  * When `flag` is set to `OPTIONAL`, fill this IceModelVec with
857  * `default_value` if could not find the variable in the provided
858  * input file.
859  *
860  * When `flag` is set to `CRITICAL_FILL_MISSING`, replace missing
861  * values matching the `_FillValue` attribute with `default_value`,
862  * stop if could not find the variable.
863  *
864  * When `flag` is set to `OPTIONAL_FILL_MISSING`, replace missing
865  * values matching the `_FillValue` attribute with `default_value`;
866  * fill the whole IceModelVec with `default_value` if could not find
867  * the variable.
868  *
869  * @param file input file
870  * @param flag regridding mode, see above
871  * @param default_value default value, meaning depends on the
872  * regridding mode flag
873  *
874  * @return 0 on success
875  */
876 void IceModelVec::regrid(const File &file, RegriddingFlag flag,
877  double default_value) {
878  m_impl->grid->ctx()->log()->message(3, " [%s] Regridding %s...\n",
879  timestamp(m_impl->grid->com).c_str(), m_impl->name.c_str());
880  double start_time = get_time();
881  m_impl->grid->ctx()->profiling().begin("io.regridding");
882  try {
883  this->regrid_impl(file, flag, default_value);
884  inc_state_counter(); // mark as modified
885  } catch (RuntimeError &e) {
886  e.add_context("regridding '%s' from '%s'",
887  this->get_name().c_str(), file.filename().c_str());
888  throw;
889  }
890  m_impl->grid->ctx()->profiling().end("io.regridding");
891  double
892  end_time = get_time(),
893  time_spent = end_time - start_time;
894 
895  if (time_spent > 1.0) {
896  m_impl->grid->ctx()->log()->message(3, " done in %f seconds.\n", time_spent);
897  } else {
898  m_impl->grid->ctx()->log()->message(3, " done.\n");
899  }
900 }
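// [Editorial addition, not part of iceModelVec.cc] Usage sketch of the regridding
// modes documented above; the File object "file" and the field "basal_melt_rate"
// are hypothetical:
//
//   // stop if "basal_melt_rate" is missing from the file (default_value ignored):
//   basal_melt_rate.regrid(file, CRITICAL, 0.0);
//   // or fall back to a constant if the variable is missing:
//   basal_melt_rate.regrid(file, OPTIONAL, 0.0);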
901 
902 void IceModelVec::read(const File &file, const unsigned int time) {
903  this->read_impl(file, time);
904  inc_state_counter(); // mark as modified
905 }
906 
907 void IceModelVec::write(const File &file) const {
908  define(file);
909 
910  m_impl->grid->ctx()->log()->message(3, " [%s] Writing %s...",
911  timestamp(m_impl->grid->com).c_str(),
912  m_impl->name.c_str());
913 
914  double start_time = get_time();
915  write_impl(file);
916  double end_time = get_time();
917 
918  const double
919  time_spent = end_time - start_time,
920  megabyte = pow(2, 20),
921  mb_double = sizeof(double) * size() / megabyte,
922  mb_float = sizeof(float) * size() / megabyte;
923 
924  std::string timestamp = pism::timestamp(m_impl->grid->com);
925  std::string spacer(timestamp.size(), ' ');
926  if (time_spent > 1) {
927  m_impl->grid->ctx()->log()->message(3,
928  "\n"
929  " [%s] Done writing %s (%f Mb double, %f Mb float)\n"
930  " %s in %f seconds (%f minutes).\n"
931  " %s Effective throughput: double: %f Mb/s, float: %f Mb/s.\n",
932  timestamp.c_str(), m_impl->name.c_str(), mb_double, mb_float,
933  spacer.c_str(), time_spent, time_spent / 60.0,
934  spacer.c_str(),
935  mb_double / time_spent, mb_float / time_spent);
936  } else {
937  m_impl->grid->ctx()->log()->message(3, " done.\n");
938  }
939 }
940 
941 AccessList::AccessList() {
942  // empty
943 }
944 
945 AccessList::~AccessList() {
946  while (not m_vecs.empty()) {
947  try {
948  m_vecs.back()->end_access();
949  m_vecs.pop_back();
950  } catch (...) {
951  handle_fatal_errors(MPI_COMM_SELF);
952  }
953  }
954 }
955 
956 AccessList::AccessList(std::initializer_list<const PetscAccessible *> vecs) {
957  for (const auto *j : vecs) {
958  assert(j != nullptr);
959  add(*j);
960  }
961 }
962 
963 AccessList::AccessList(const PetscAccessible &vec) {
964  add(vec);
965 }
966 
967 void AccessList::add(const PetscAccessible &vec) {
968  vec.begin_access();
969  m_vecs.push_back(&vec);
970 }
971 
972 void AccessList::add(const std::vector<const PetscAccessible*> &vecs) {
973  for (const auto *v : vecs) {
974  assert(v != nullptr);
975  add(*v);
976  }
977 }
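// [Editorial addition, not part of iceModelVec.cc] Sketch of the usual access
// pattern, mirroring the loops elsewhere in this file; "a" and "b" are hypothetical
// 2D scalar fields on the same grid:
//
//   IceModelVec::AccessList list{&a, &b};
//   for (Points p(*a.grid()); p; p.next()) {
//     const int i = p.i(), j = p.j();
//     b(i, j) = 2.0 * a(i, j);
//   }
//   // end_access() is called for both fields when "list" goes out of scope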
978 
979 //! Return the total number of elements in the *owned* part of an array.
980 size_t IceModelVec::size() const {
981  // m_impl->dof > 1 for vector, staggered grid 2D fields, etc. In this case
982  // zlevels.size() == 1. For 3D fields, m_impl->dof == 1 (all 3D fields are
983  // scalar) and zlevels.size() corresponds to dof of the underlying PETSc
984  // DM object.
985 
986  size_t
987  Mx = m_impl->grid->Mx(),
988  My = m_impl->grid->My(),
989  Mz = m_impl->zlevels.size(),
990  dof = m_impl->dof;
991 
992  return Mx * My * Mz * dof;
993 }
994 
995 /*! Allocate a copy on processor zero and the scatter needed to move data.
996  */
997 std::shared_ptr<petsc::Vec> IceModelVec::allocate_proc0_copy() const {
998  PetscErrorCode ierr;
999  Vec v_proc0 = NULL;
1000  Vec result = NULL;
1001 
1002  ierr = PetscObjectQuery((PetscObject)dm()->get(), "v_proc0", (PetscObject*)&v_proc0);
1003  PISM_CHK(ierr, "PetscObjectQuery")
1004  ;
1005  if (v_proc0 == NULL) {
1006 
1007  // natural_work will be destroyed at the end of scope, but it will
1008  // only decrement the reference counter incremented by
1009  // PetscObjectCompose below.
1010  petsc::Vec natural_work;
1011  // create a work vector with natural ordering:
1012  ierr = DMDACreateNaturalVector(*dm(), natural_work.rawptr());
1013  PISM_CHK(ierr, "DMDACreateNaturalVector");
1014 
1015  // this increments the reference counter of natural_work
1016  ierr = PetscObjectCompose((PetscObject)dm()->get(), "natural_work",
1017  (PetscObject)((::Vec)natural_work));
1018  PISM_CHK(ierr, "PetscObjectCompose");
1019 
1020  // scatter_to_zero will be destroyed at the end of scope, but it
1021  // will only decrement the reference counter incremented by
1022  // PetscObjectCompose below.
1023  petsc::VecScatter scatter_to_zero;
1024 
1025  // initialize the scatter to processor 0 and create storage on processor 0
1026  ierr = VecScatterCreateToZero(natural_work, scatter_to_zero.rawptr(),
1027  &v_proc0);
1028  PISM_CHK(ierr, "VecScatterCreateToZero");
1029 
1030  // this increments the reference counter of scatter_to_zero
1031  ierr = PetscObjectCompose((PetscObject)dm()->get(), "scatter_to_zero",
1032  (PetscObject)((::VecScatter)scatter_to_zero));
1033  PISM_CHK(ierr, "PetscObjectCompose");
1034 
1035  // this increments the reference counter of v_proc0
1036  ierr = PetscObjectCompose((PetscObject)dm()->get(), "v_proc0",
1037  (PetscObject)v_proc0);
1038  PISM_CHK(ierr, "PetscObjectCompose");
1039 
1040  // We DO NOT call VecDestroy(v_proc0): the petsc::Vec wrapper will
1041  // take care of this.
1042  result = v_proc0;
1043  } else {
1044  // We DO NOT call VecDestroy(result): the petsc::Vec wrapper will
1045  // take care of this.
1046  ierr = VecDuplicate(v_proc0, &result);
1047  PISM_CHK(ierr, "VecDuplicate");
1048  }
1049  return std::shared_ptr<petsc::Vec>(new petsc::Vec(result));
1050 }
1051 
1052 void IceModelVec::put_on_proc0(petsc::Vec &parallel, petsc::Vec &onp0) const {
1053  PetscErrorCode ierr = 0;
1054  VecScatter scatter_to_zero = NULL;
1055  Vec natural_work = NULL;
1056 
1057  ierr = PetscObjectQuery((PetscObject)dm()->get(), "scatter_to_zero",
1058  (PetscObject*)&scatter_to_zero);
1059  PISM_CHK(ierr, "PetscObjectQuery");
1060 
1061  ierr = PetscObjectQuery((PetscObject)dm()->get(), "natural_work",
1062  (PetscObject*)&natural_work);
1063  PISM_CHK(ierr, "PetscObjectQuery");
1064 
1065  if (natural_work == NULL || scatter_to_zero == NULL) {
1066  throw RuntimeError(PISM_ERROR_LOCATION, "call allocate_proc0_copy() before calling put_on_proc0");
1067  }
1068 
1069  ierr = DMDAGlobalToNaturalBegin(*dm(), parallel, INSERT_VALUES, natural_work);
1070  PISM_CHK(ierr, "DMDAGlobalToNaturalBegin");
1071 
1072  ierr = DMDAGlobalToNaturalEnd(*dm(), parallel, INSERT_VALUES, natural_work);
1073  PISM_CHK(ierr, "DMDAGlobalToNaturalEnd");
1074 
1075  ierr = VecScatterBegin(scatter_to_zero, natural_work, onp0,
1076  INSERT_VALUES, SCATTER_FORWARD);
1077  PISM_CHK(ierr, "VecScatterBegin");
1078 
1079  ierr = VecScatterEnd(scatter_to_zero, natural_work, onp0,
1080  INSERT_VALUES, SCATTER_FORWARD);
1081  PISM_CHK(ierr, "VecScatterEnd");
1082 }
1083 
1084 
1085 //! Puts a local IceModelVec2S on processor 0.
1086 void IceModelVec::put_on_proc0(petsc::Vec &onp0) const {
1087  if (m_impl->ghosted) {
1088  petsc::TemporaryGlobalVec tmp(dm());
1089  this->copy_to_vec(dm(), tmp);
1090  put_on_proc0(tmp, onp0);
1091  } else {
1092  put_on_proc0(vec(), onp0);
1093  }
1094 }
1095 
1096 void IceModelVec::get_from_proc0(petsc::Vec &onp0, petsc::Vec &parallel) const {
1097  PetscErrorCode ierr;
1098 
1099  VecScatter scatter_to_zero = NULL;
1100  Vec natural_work = NULL;
1101  ierr = PetscObjectQuery((PetscObject)dm()->get(), "scatter_to_zero",
1102  (PetscObject*)&scatter_to_zero);
1103  PISM_CHK(ierr, "PetscObjectQuery");
1104 
1105  ierr = PetscObjectQuery((PetscObject)dm()->get(), "natural_work",
1106  (PetscObject*)&natural_work);
1107  PISM_CHK(ierr, "PetscObjectQuery");
1108 
1109  if (natural_work == NULL || scatter_to_zero == NULL) {
1110  throw RuntimeError(PISM_ERROR_LOCATION, "call allocate_proc0_copy() before calling get_from_proc0");
1111  }
1112 
1113  ierr = VecScatterBegin(scatter_to_zero, onp0, natural_work,
1114  INSERT_VALUES, SCATTER_REVERSE);
1115  PISM_CHK(ierr, "VecScatterBegin");
1116 
1117  ierr = VecScatterEnd(scatter_to_zero, onp0, natural_work,
1118  INSERT_VALUES, SCATTER_REVERSE);
1119  PISM_CHK(ierr, "VecScatterEnd");
1120 
1121  ierr = DMDANaturalToGlobalBegin(*dm(), natural_work, INSERT_VALUES, parallel);
1122  PISM_CHK(ierr, "DMDANaturalToGlobalBegin");
1123 
1124  ierr = DMDANaturalToGlobalEnd(*dm(), natural_work, INSERT_VALUES, parallel);
1125  PISM_CHK(ierr, "DMDANaturalToGlobalEnd");
1126 }
1127 
1128 //! Gets a local IceModelVec2 from processor 0.
1129 void IceModelVec::get_from_proc0(petsc::Vec &onp0) {
1130  if (m_impl->ghosted) {
1131  petsc::TemporaryGlobalVec tmp(dm());
1132  get_from_proc0(onp0, tmp);
1133  global_to_local(*dm(), tmp, vec());
1134  } else {
1135  get_from_proc0(onp0, vec());
1136  }
1137  inc_state_counter(); // mark as modified
1138 }
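// [Editorial addition, not part of iceModelVec.cc] Sketch of a rank-0 round trip
// using the three methods above; "field" is a hypothetical IceModelVec2S:
//
//   auto proc0 = field.allocate_proc0_copy();
//   field.put_on_proc0(*proc0);
//   // ... serial work on *proc0 (meaningful on rank 0 only) ...
//   field.get_from_proc0(*proc0);  // scatter the result back to all ranks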
1139 
1140 /*!
1141  * Copy data to rank 0 and compute the checksum.
1142  *
1143  * Does not use ghosts. Results should be independent of the parallel domain
1144  * decomposition.
1145  */
1146 uint64_t IceModelVec::fletcher64_serial() const {
1147 
1148  auto v = allocate_proc0_copy();
1149  put_on_proc0(*v);
1150 
1151  MPI_Comm com = m_impl->grid->ctx()->com();
1152 
1153  int rank = 0;
1154  MPI_Comm_rank(com, &rank);
1155 
1156  uint64_t result = 0;
1157  if (rank == 0) {
1158  petsc::VecArray array(*v);
1159 
1160  PetscInt size = 0;
1161  PetscErrorCode ierr = VecGetLocalSize(*v, &size);
1162  PISM_CHK(ierr, "VecGetLocalSize");
1163 
1164  result = pism::fletcher64((uint32_t*)array.get(), size * 2);
1165  }
1166  MPI_Bcast(&result, 1, MPI_UINT64_T, 0, com);
1167 
1168  return result;
1169 }
1170 
1171 /*!
1172  * Compute a checksum of a vector.
1173  *
1174  * The result depends on the number of processors used.
1175  *
1176  * We assume that sizeof(double) == 2 * sizeof(uint32_t), i.e. double uses 64 bits.
1177  */
1178 uint64_t IceModelVec::fletcher64() const {
1179  static_assert(sizeof(double) == 2 * sizeof(uint32_t), "Cannot compile IceModelVec::fletcher64() (sizeof(double) != 2 * sizeof(uint32_t))");
1180 
1181  MPI_Status mpi_stat;
1182  const int checksum_tag = 42;
1183 
1184  MPI_Comm com = m_impl->grid->ctx()->com();
1185 
1186  int rank = 0;
1187  MPI_Comm_rank(com, &rank);
1188 
1189  int comm_size = 0;
1190  MPI_Comm_size(com, &comm_size);
1191 
1192  PetscInt local_size = 0;
1193  PetscErrorCode ierr = VecGetLocalSize(vec(), &local_size); PISM_CHK(ierr, "VecGetLocalSize");
1194  uint64_t sum = 0;
1195  {
1196  petsc::VecArray v(vec());
1197  // compute checksums for local patches on all ranks
1198  sum = pism::fletcher64((uint32_t*)v.get(), local_size * 2);
1199  }
1200 
1201  if (rank == 0) {
1202  std::vector<uint64_t> sums(comm_size);
1203 
1204  // gather checksums of patches on rank 0
1205  sums[0] = sum;
1206  for (int r = 1; r < comm_size; ++r) {
1207  MPI_Recv(&sums[r], 1, MPI_UINT64_T, r, checksum_tag, com, &mpi_stat);
1208  }
1209 
1210  // compute the checksum of checksums
1211  sum = pism::fletcher64((uint32_t*)sums.data(), comm_size * 2);
1212  } else {
1213  MPI_Send(&sum, 1, MPI_UINT64_T, 0, checksum_tag, com);
1214  }
1215 
1216  // broadcast to all ranks
1217  MPI_Bcast(&sum, 1, MPI_UINT64_T, 0, com);
1218 
1219  return sum;
1220 }
1221 
1222 std::string IceModelVec::checksum(bool serial) const {
1223  if (serial) {
1224  // unsigned long long is supposed to be at least 64 bit long
1225  return pism::printf("%016llx", (unsigned long long int)this->fletcher64_serial());
1226  }
1227  // unsigned long long is supposed to be at least 64 bit long
1228  return pism::printf("%016llx", (unsigned long long int)this->fletcher64());
1229 }
1230 
1231 void IceModelVec::print_checksum(const char *prefix, bool serial) const {
1232  auto log = m_impl->grid->ctx()->log();
1233 
1234  log->message(1, "%s%s: %s\n", prefix, m_impl->name.c_str(), checksum(serial).c_str());
1235 }
1236 
1237 void convert_vec(petsc::Vec &v, units::System::Ptr system,
1238  const std::string &spec1, const std::string &spec2) {
1239  units::Converter c(system, spec1, spec2);
1240 
1241  // has to be a PetscInt because of the VecGetLocalSize() call
1242  PetscInt data_size = 0;
1243  PetscErrorCode ierr = VecGetLocalSize(v, &data_size);
1244  PISM_CHK(ierr, "VecGetLocalSize");
1245 
1246  petsc::VecArray data(v);
1247  c.convert_doubles(data.get(), data_size);
1248 }
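// [Editorial addition, not part of iceModelVec.cc] Usage sketch; "tmp" is a
// hypothetical petsc::Vec and "sys" a units::System::Ptr. Values are converted
// in place:
//
//   convert_vec(tmp, sys, "m second-1", "m year-1");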
1249 
1250 void staggered_to_regular(const IceModelVec2CellType &cell_type,
1251  const IceModelVec2Stag &input,
1252  bool include_floating_ice,
1253  IceModelVec2S &result) {
1254 
1255  using mask::grounded_ice;
1256  using mask::icy;
1257 
1258  assert(cell_type.stencil_width() > 0);
1259  assert(input.stencil_width() > 0);
1260 
1261  IceGrid::ConstPtr grid = result.grid();
1262 
1263  IceModelVec::AccessList list{&cell_type, &input, &result};
1264 
1265  for (Points p(*grid); p; p.next()) {
1266  const int i = p.i(), j = p.j();
1267 
1268  if (cell_type.grounded_ice(i, j) or
1269  (include_floating_ice and cell_type.icy(i, j))) {
1270  auto M = cell_type.star(i, j);
1271  auto F = input.star(i, j);
1272 
1273  int n = 0, e = 0, s = 0, w = 0;
1274  if (include_floating_ice) {
1275  n = static_cast<int>(icy(M.n));
1276  e = static_cast<int>(icy(M.e));
1277  s = static_cast<int>(icy(M.s));
1278  w = static_cast<int>(icy(M.w));
1279  } else {
1280  n = static_cast<int>(grounded_ice(M.n));
1281  e = static_cast<int>(grounded_ice(M.e));
1282  s = static_cast<int>(grounded_ice(M.s));
1283  w = static_cast<int>(grounded_ice(M.w));
1284  }
1285 
1286  if (n + e + s + w > 0) {
1287  result(i, j) = (n * F.n + e * F.e + s * F.s + w * F.w) / (n + e + s + w);
1288  } else {
1289  result(i, j) = 0.0;
1290  }
1291  } else {
1292  result(i, j) = 0.0;
1293  }
1294  }
1295 }
1296 
1297 void staggered_to_regular(const IceModelVec2CellType &cell_type,
1298  const IceModelVec2Stag &input,
1299  bool include_floating_ice,
1300  IceModelVec2V &result) {
1301 
1302  using mask::grounded_ice;
1303  using mask::icy;
1304 
1305  assert(cell_type.stencil_width() > 0);
1306  assert(input.stencil_width() > 0);
1307 
1308  IceGrid::ConstPtr grid = result.grid();
1309 
1310  IceModelVec::AccessList list{&cell_type, &input, &result};
1311 
1312  for (Points p(*grid); p; p.next()) {
1313  const int i = p.i(), j = p.j();
1314 
1315  auto M = cell_type.star(i, j);
1316  auto F = input.star(i, j);
1317 
1318  int n = 0, e = 0, s = 0, w = 0;
1319  if (include_floating_ice) {
1320  n = static_cast<int>(icy(M.n));
1321  e = static_cast<int>(icy(M.e));
1322  s = static_cast<int>(icy(M.s));
1323  w = static_cast<int>(icy(M.w));
1324  } else {
1325  n = static_cast<int>(grounded_ice(M.n));
1326  e = static_cast<int>(grounded_ice(M.e));
1327  s = static_cast<int>(grounded_ice(M.s));
1328  w = static_cast<int>(grounded_ice(M.w));
1329  }
1330 
1331  if (e + w > 0) {
1332  result(i, j).u = (e * F.e + w * F.w) / (e + w);
1333  } else {
1334  result(i, j).u = 0.0;
1335  }
1336 
1337  if (n + s > 0) {
1338  result(i, j).v = (n * F.n + s * F.s) / (n + s);
1339  } else {
1340  result(i, j).v = 0.0;
1341  }
1342  }
1343 }
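// [Editorial addition, not part of iceModelVec.cc] Usage sketch for the scalar
// overload of staggered_to_regular() above; all field names are hypothetical and
// the staggered input must have ghosts:
//
//   staggered_to_regular(cell_type, flux_staggered,
//                        /* include_floating_ice = */ false, flux_regular);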
1344 
1345 //! \brief View a 2D vector field using existing PETSc viewers.
1346 void IceModelVec::view(std::vector<std::shared_ptr<petsc::Viewer> > viewers) const {
1347  PetscErrorCode ierr;
1348 
1349  if (ndims() == 3) {
1350  throw RuntimeError::formatted(PISM_ERROR_LOCATION,
1351  "cannot 'view' a 3D field '%s'",
1352  get_name().c_str());
1353  }
1354 
1355  // Get the dof=1, stencil_width=0 DMDA (components are always scalar
1356  // and we just need a global Vec):
1357  auto da2 = m_impl->grid->get_dm(1, 0);
1358 
1359  petsc::TemporaryGlobalVec tmp(da2);
1360 
1361  for (unsigned int i = 0; i < ndof(); ++i) {
1362  std::string
1363  long_name = m_impl->metadata[i].get_string("long_name"),
1364  units = m_impl->metadata[i].get_string("units"),
1365  glaciological_units = m_impl->metadata[i].get_string("glaciological_units"),
1366  title = long_name + " (" + glaciological_units + ")";
1367 
1368  PetscViewer v = *viewers[i].get();
1369 
1370  PetscDraw draw = NULL;
1371  ierr = PetscViewerDrawGetDraw(v, 0, &draw);
1372  PISM_CHK(ierr, "PetscViewerDrawGetDraw");
1373 
1374  ierr = PetscDrawSetTitle(draw, title.c_str());
1375  PISM_CHK(ierr, "PetscDrawSetTitle");
1376 
1377  get_dof(da2, tmp, i);
1378 
1379  convert_vec(tmp, m_impl->metadata[i].unit_system(),
1380  units, glaciological_units);
1381 
1382  double bounds[2] = {0.0, 0.0};
1383  ierr = VecMin(tmp, NULL, &bounds[0]); PISM_CHK(ierr, "VecMin");
1384  ierr = VecMax(tmp, NULL, &bounds[1]); PISM_CHK(ierr, "VecMax");
1385 
1386  if (bounds[0] == bounds[1]) {
1387  bounds[0] = -1.0;
1388  bounds[1] = 1.0;
1389  }
1390 
1391  ierr = PetscViewerDrawSetBounds(v, 1, bounds);
1392  PISM_CHK(ierr, "PetscViewerDrawSetBounds");
1393 
1394  ierr = VecView(tmp, v);
1395  PISM_CHK(ierr, "VecView");
1396  }
1397 }
1398 
1399 } // end of namespace pism