PISM, A Parallel Ice Sheet Model  stable v2.1-1-g6902d5502 committed by Ed Bueler on 2023-12-20 08:38:27 -0800
Array.cc
1 // Copyright (C) 2008--2023 Ed Bueler, Constantine Khroulev, and David Maxwell
2 //
3 // This file is part of PISM.
4 //
5 // PISM is free software; you can redistribute it and/or modify it under the
6 // terms of the GNU General Public License as published by the Free Software
7 // Foundation; either version 3 of the License, or (at your option) any later
8 // version.
9 //
10 // PISM is distributed in the hope that it will be useful, but WITHOUT ANY
11 // WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
12 // FOR A PARTICULAR PURPOSE. See the GNU General Public License for more
13 // details.
14 //
15 // You should have received a copy of the GNU General Public License
16 // along with PISM; if not, write to the Free Software
17 // Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA
18 
19 #include <cassert>
20 
21 #include <cmath>
22 #include <cstddef>
23 #include <petscdraw.h>
24 #include <string>
25 
26 #include "pism/util/array/Array.hh"
27 #include "pism/util/array/Array_impl.hh"
28 
29 #include "pism/util/Time.hh"
30 #include "pism/util/Grid.hh"
31 #include "pism/util/ConfigInterface.hh"
32 
33 #include "pism/util/error_handling.hh"
34 #include "pism/util/io/io_helpers.hh"
35 #include "pism/util/Logger.hh"
36 #include "pism/util/Profiling.hh"
37 #include "pism/util/petscwrappers/VecScatter.hh"
38 #include "pism/util/petscwrappers/Viewer.hh"
39 #include "pism/util/Context.hh"
40 #include "pism/util/VariableMetadata.hh"
41 #include "pism/util/io/File.hh"
42 #include "pism/util/pism_utilities.hh"
43 #include "pism/util/io/IO_Flags.hh"
44 
45 namespace pism {
46 
47 namespace array {
48 
49 void global_to_local(petsc::DM &dm, Vec source, Vec destination) {
50  PetscErrorCode ierr;
51 
52  ierr = DMGlobalToLocalBegin(dm, source, INSERT_VALUES, destination);
53  PISM_CHK(ierr, "DMGlobalToLocalBegin");
54 
55  ierr = DMGlobalToLocalEnd(dm, source, INSERT_VALUES, destination);
56  PISM_CHK(ierr, "DMGlobalToLocalEnd");
57 }
58 
59 Array::Array(std::shared_ptr<const Grid> grid,
60  const std::string &name,
61  Kind ghostedp,
62  size_t dof,
63  size_t stencil_width,
64  const std::vector<double> &zlevels) {
65  m_impl = new Impl();
66  m_array = nullptr;
67 
68  m_impl->name = name;
69  m_impl->grid = grid;
70  m_impl->ghosted = (ghostedp == WITH_GHOSTS);
71  m_impl->dof = dof;
72  m_impl->zlevels = zlevels;
73 
74  const auto &config = grid->ctx()->config();
75 
76  auto max_stencil_width = static_cast<size_t>(config->get_number("grid.max_stencil_width"));
77  if ((dof != 1) or (stencil_width > max_stencil_width)) {
78  // use the requested stencil width *if* we have to
79  m_impl->da_stencil_width = stencil_width;
80  } else {
81  // otherwise use the "standard" stencil width
82  m_impl->da_stencil_width = max_stencil_width;
83  }
84 
85  auto system = m_impl->grid->ctx()->unit_system();
86  if (m_impl->dof > 1) {
87  // dof > 1: this is a 2D vector
88  for (unsigned int j = 0; j < m_impl->dof; ++j) {
89  m_impl->metadata.push_back({system, pism::printf("%s[%d]", name.c_str(), j)});
90  }
91  } else {
92  // dof == 1: both 2D and 3D scalar fields
93  m_impl->metadata = {{system, name, zlevels}};
94  }
95 
96  if (zlevels.size() > 1) {
97  m_impl->bsearch_accel = gsl_interp_accel_alloc();
98  if (m_impl->bsearch_accel == NULL) {
99  throw RuntimeError(PISM_ERROR_LOCATION,
100  "Failed to allocate a GSL interpolation accelerator");
101  }
102  }
103 }
104 
105 Array::~Array() {
106  assert(m_impl->access_counter == 0);
107 
108  if (m_impl->bsearch_accel != nullptr) {
109  gsl_interp_accel_free(m_impl->bsearch_accel);
110  m_impl->bsearch_accel = nullptr;
111  }
112 
113  delete m_impl;
114  m_impl = nullptr;
115 }
116 
117 //! \brief Get the object state counter.
118 /*!
119  * This method returns the "revision number" of an Array.
120  *
121  * It can be used to determine if a field was updated and if a certain
122  * computation needs to be re-done. One example is computing the smoothed bed
123  * for the SIA computation, which is only necessary if the bed deformation code
124  * fired.
125  *
126  * See also inc_state_counter().
127  */
128 int Array::state_counter() const {
129  return m_impl->state_counter;
130 }
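// Illustrative sketch (not part of this file): caching keyed on the state
// counter; `bed`, `m_last_bed_state`, and `recompute_smoothed_bed()` are
// hypothetical names.
//
//   if (bed.state_counter() != m_last_bed_state) {
//     recompute_smoothed_bed(bed);             // redo the cached computation
//     m_last_bed_state = bed.state_counter();  // remember the revision seen
//   }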
131 
132 std::shared_ptr<const Grid> Array::grid() const {
133  return m_impl->grid;
134 }
135 
136 unsigned int Array::ndof() const {
137  return m_impl->dof;
138 }
139 
140 const std::vector<double>& Array::levels() const {
141  return m_impl->zlevels;
142 }
143 
144 //! \brief Increment the object state counter.
145 /*!
146  * See the documentation of get_state_counter(). This method is the
147  * *only* way to manually increment the state counter. It is also
148  * automatically updated by Array methods that are known to
149  * change stored values.
150  */
151 void Array::inc_state_counter() {
152  m_impl->state_counter += 1;
153 }
154 
155 //! Returns the number of spatial dimensions.
156 unsigned int Array::ndims() const {
157  return m_impl->zlevels.size() > 1 ? 3 : 2;
158 }
159 
160 std::vector<int> Array::shape() const {
161 
162  auto grid = m_impl->grid;
163 
164  if (ndims() == 3) {
165  return {(int)grid->My(), (int)grid->Mx(), (int)levels().size()};
166  }
167 
168  if (ndof() == 1) {
169  return {(int)grid->My(), (int)grid->Mx()};
170  }
171 
172  return {(int)grid->My(), (int)grid->Mx(), (int)ndof()};
173 }
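// Worked example (sketch): on a grid with Mx == 60 and My == 120, a 2D scalar
// field has shape {120, 60}; a 2D field with ndof() == 2 has shape
// {120, 60, 2}; a 3D field with 41 vertical levels has shape {120, 60, 41}.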
174 
175 void Array::set_begin_access_use_dof(bool flag) {
176  m_impl->begin_access_use_dof = flag;
177 }
178 
179 void Array::set_interpolation_type(InterpolationType type) {
180  if (not (type == LINEAR or type == NEAREST)) {
181  throw RuntimeError::formatted(PISM_ERROR_LOCATION,
182  "invalid interpolation type: %d", (int)type);
183  }
184  m_impl->interpolation_type = type;
185 }
186 
187 //! Result: min <- min(v[j]), max <- max(v[j]).
188 /*!
189 The PETSc manual correctly says "VecMin and VecMax are collective on Vec", but
190 GlobalMax and GlobalMin \e are needed when m_impl->ghosted == true to get
191 correct values, because Vecs created with DMCreateLocalVector() are of type
192 VECSEQ and not VECMPI. See src/trypetsc/localVecMax.c.
193  */
194 std::array<double,2> Array::range() const {
195  PetscErrorCode ierr;
196 
197  double min{0.0};
198  ierr = VecMin(vec(), NULL, &min);
199  PISM_CHK(ierr, "VecMin");
200 
201  double max{0.0};
202  ierr = VecMax(vec(), NULL, &max);
203  PISM_CHK(ierr, "VecMax");
204 
205  if (m_impl->ghosted) {
206  // needs a reduce operation; use GlobalMin and GlobalMax;
207  min = GlobalMin(m_impl->grid->com, min);
208  max = GlobalMax(m_impl->grid->com, max);
209  }
210  return {min, max};
211 }
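// Usage sketch (`thickness` and `log` are hypothetical): range() is
// collective, so call it on all ranks.
//
//   auto mm = thickness.range();
//   log->message(2, "thickness range: [%f, %f]\n", mm[0], mm[1]);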
212 
213 /** Convert from `int` to PETSc's `NormType`.
214  *
215  *
216  * @param[in] input norm type as an integer
217  *
218  * @return norm type as PETSc's `NormType`.
219  */
220 static NormType int_to_normtype(int input) {
221  switch (input) {
222  case NORM_1:
223  return NORM_1;
224  case NORM_2:
225  return NORM_2;
226  case NORM_INFINITY:
227  return NORM_INFINITY;
228  default:
229  throw RuntimeError::formatted(PISM_ERROR_LOCATION, "invalid norm type: %d",
230  input);
231  }
232 }
233 
234 //! Result: v <- v + alpha * x. Calls VecAXPY.
235 void Array::add(double alpha, const Array &x) {
236  checkCompatibility("add", x);
237 
238  PetscErrorCode ierr = VecAXPY(vec(), alpha, x.vec());
239  PISM_CHK(ierr, "VecAXPY");
240 
241  inc_state_counter(); // mark as modified
242 }
243 
244 //! Result: v[j] <- v[j] + alpha for all j. Calls VecShift.
245 void Array::shift(double alpha) {
246  PetscErrorCode ierr = VecShift(vec(), alpha);
247  PISM_CHK(ierr, "VecShift");
248 
249  inc_state_counter(); // mark as modified
250 }
251 
252 //! Result: v <- v * alpha. Calls VecScale.
253 void Array::scale(double alpha) {
254  PetscErrorCode ierr = VecScale(vec(), alpha);
255  PISM_CHK(ierr, "VecScale");
256 
257  inc_state_counter(); // mark as modified
258 }
259 
260 //! Copies v to a global vector 'destination'. Ghost points are discarded.
261 /*! This is potentially dangerous: make sure that `destination` has the same
262  dimensions as the current Array.
263 
264  DMLocalToGlobalBegin/End is broken in PETSc 3.5, so we roll our
265  own.
266  */
267 void Array::copy_to_vec(std::shared_ptr<petsc::DM> destination_da, petsc::Vec &destination) const {
268  // m_dof > 1 for vector, staggered grid 2D fields, etc. In this case
269  // zlevels.size() == 1. For 3D fields, m_dof == 1 (all 3D fields are
270  // scalar) and zlevels.size() corresponds to dof of the underlying PETSc
271  // DM object. So we want the bigger of the two numbers here.
272  unsigned int N = std::max((size_t)m_impl->dof, m_impl->zlevels.size());
273 
274  this->get_dof(destination_da, destination, 0, N);
275 }
276 
277 void Array::get_dof(std::shared_ptr<petsc::DM> da_result, petsc::Vec &result, unsigned int start,
278  unsigned int count) const {
279  if (start >= m_impl->dof) {
280  throw RuntimeError::formatted(PISM_ERROR_LOCATION, "invalid argument (start); got %d", start);
281  }
282 
283  petsc::DMDAVecArrayDOF tmp_res(da_result, result), tmp_v(dm(), vec());
284 
285  double ***result_a = static_cast<double ***>(tmp_res.get()),
286  ***source_a = static_cast<double ***>(tmp_v.get());
287 
288  ParallelSection loop(m_impl->grid->com);
289  try {
290  for (auto p = m_impl->grid->points(); p; p.next()) {
291  const int i = p.i(), j = p.j();
292  PetscErrorCode ierr =
293  PetscMemcpy(result_a[j][i], &source_a[j][i][start], count * sizeof(PetscScalar));
294  PISM_CHK(ierr, "PetscMemcpy");
295  }
296  } catch (...) {
297  loop.failed();
298  }
299  loop.check();
300 }
301 
302 void Array::set_dof(std::shared_ptr<petsc::DM> da_source, petsc::Vec &source, unsigned int start,
303  unsigned int count) {
304  if (start >= m_impl->dof) {
305  throw RuntimeError::formatted(PISM_ERROR_LOCATION, "invalid argument (start); got %d", start);
306  }
307 
308  petsc::DMDAVecArrayDOF tmp_src(da_source, source), tmp_v(dm(), vec());
309 
310  double ***source_a = static_cast<double ***>(tmp_src.get()),
311  ***result_a = static_cast<double ***>(tmp_v.get());
312 
313  ParallelSection loop(m_impl->grid->com);
314  try {
315  for (auto p = m_impl->grid->points(); p; p.next()) {
316  const int i = p.i(), j = p.j();
317  PetscErrorCode ierr =
318  PetscMemcpy(&result_a[j][i][start], source_a[j][i], count * sizeof(PetscScalar));
319  PISM_CHK(ierr, "PetscMemcpy");
320  }
321  } catch (...) {
322  loop.failed();
323  }
324  loop.check();
325 
326  inc_state_counter(); // mark as modified
327 }
328 
329 //! @brief Get the stencil width of the current Array. Returns 0
330 //! if ghosts are not available.
331 unsigned int Array::stencil_width() const {
332  if (m_impl->ghosted) {
333  return m_impl->da_stencil_width;
334  }
335 
336  return 0;
337 }
338 
339 petsc::Vec& Array::vec() const {
340  if (m_impl->v.get() == nullptr) {
341  PetscErrorCode ierr = 0;
342  if (m_impl->ghosted) {
343  ierr = DMCreateLocalVector(*dm(), m_impl->v.rawptr());
344  PISM_CHK(ierr, "DMCreateLocalVector");
345  } else {
346  ierr = DMCreateGlobalVector(*dm(), m_impl->v.rawptr());
347  PISM_CHK(ierr, "DMCreateGlobalVector");
348  }
349  }
350  return m_impl->v;
351 }
352 
353 std::shared_ptr<petsc::DM> Array::dm() const {
354  if (m_impl->da == nullptr) {
355  // dof > 1 for vector, staggered grid 2D fields, etc. In this case zlevels.size() ==
356  // 1. For 3D fields, dof == 1 (all 3D fields are scalar) and zlevels.size()
357  // corresponds to dof of the underlying PETSc DM object.
358  auto da_dof = std::max((unsigned int)m_impl->zlevels.size(), m_impl->dof);
359 
360  // initialize the da member:
361  m_impl->da = grid()->get_dm(da_dof, m_impl->da_stencil_width);
362  }
363  return m_impl->da;
364 }
365 
366 //! Sets the variable name to `name`.
367 /**
368  * This is the "overall" name of a field. This is **not** the same as the
369  * NetCDF variable name. Use `metadata(...).set_name(...)` to set that.
370  */
371 void Array::set_name(const std::string &name) {
372  m_impl->name = name;
373 }
374 
375 //! @brief Get the name of an Array object.
376 /**
377  * This is the name used to refer to this object in PISM (e.g. via the
378  * Vars class), **not** the name of the corresponding NetCDF variable.
379  * (The problem is that one Array instance may correspond to
380  * several NetCDF variables. Use metadata(...).get_name() to get the
381  * name of NetCDF variables an Array is saved to.)
382  */
383 const std::string &Array::get_name() const {
384  return m_impl->name;
385 }
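// Sketch of the distinction (hypothetical values): the PISM-internal name and
// the NetCDF variable name are set independently.
//
//   v.set_name("land_ice_thickness");  // name used inside PISM (e.g. by Vars)
//   v.metadata(0).set_name("thk");     // name of the NetCDF variable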
386 
387 void set_default_value_or_stop(const std::string &filename, const VariableMetadata &variable,
388  io::Default default_value, const Logger &log,
389  Vec output) {
390 
391  if (not default_value.exists()) {
392  // if it's critical, print an error message and stop
393  throw RuntimeError::formatted(PISM_ERROR_LOCATION,
394  "Can't find '%s' in the regridding file '%s'.",
395  variable.get_name().c_str(), filename.c_str());
396  }
397 
398  // If it is optional, fill with the provided default value.
399  // units::Converter constructor will make sure that units are compatible.
400  units::Converter c(variable.unit_system(), variable["units"], variable["output_units"]);
401 
402  std::string spacer(variable.get_name().size(), ' ');
403  log.message(2,
404  " absent %s / %-10s\n"
405  " %s \\ not found; using default constant %7.2f (%s)\n",
406  variable.get_name().c_str(), variable.get_string("long_name").c_str(), spacer.c_str(),
407  c(default_value), variable.get_string("output_units").c_str());
408 
409  PetscErrorCode ierr = VecSet(output, default_value);
410  PISM_CHK(ierr, "VecSet");
411 }
412 
413 static std::array<double, 2> compute_range(MPI_Comm com, const double *data, size_t data_size) {
414  double min_result = data[0], max_result = data[0];
415  for (size_t k = 0; k < data_size; ++k) {
416  min_result = std::min(min_result, data[k]);
417  max_result = std::max(max_result, data[k]);
418  }
419 
420  return { GlobalMin(com, min_result), GlobalMax(com, max_result) };
421 }
422 
423 //! Gets an Array from a file `file`, interpolating onto the current grid.
424 /*! Stops with an error if the variable was not found and no default value was provided.
425  */
426 void Array::regrid_impl(const File &file, io::Default default_value) {
427 
428  auto log = grid()->ctx()->log();
429 
430  // Get the dof=1, stencil_width=0 DMDA (components are always scalar
431  // and we just need a global Vec):
432  auto da2 = grid()->get_dm(1, 0);
433 
434  // a temporary one-component vector that uses a compatible domain decomposition
435  petsc::TemporaryGlobalVec tmp(da2);
436 
437  for (unsigned int j = 0; j < ndof(); ++j) {
438  auto variable = metadata(j);
439 
440  auto V = file.find_variable(variable.get_name(), variable["standard_name"]);
441 
442  if (not V.exists) {
443  set_default_value_or_stop(file.filename(), variable, default_value, *log, tmp);
444  } else {
445  grid::InputGridInfo input_grid(file, V.name, variable.unit_system(), grid()->registration());
446 
447  input_grid.report(*log, 4, variable.unit_system());
448 
449  // Note: this implementation is used for 2D fields, so we don't need to use the
450  // internal vertical (Z) grid.
451  io::check_input_grid(input_grid, *grid(), {0.0});
452 
453  LocalInterpCtx lic(input_grid, *grid(), levels(), m_impl->interpolation_type);
454 
455  // Note: this call will read the last time record (the index is set in `lic` based on
456  // info in `input_grid`).
457  petsc::VecArray tmp_array(tmp);
458  io::regrid_spatial_variable(metadata(j), *grid(), lic, file, tmp_array.get());
459 
460  // Check the range and report it if necessary.
461  {
462  const size_t data_size = grid()->xm() * grid()->ym() * levels().size();
463  auto range = compute_range(grid()->com, tmp_array.get(), data_size);
464  auto min = range[0];
465  auto max = range[1];
466 
467  if ((not std::isfinite(min)) or (not std::isfinite(max))) {
468  throw RuntimeError::formatted(
469  PISM_ERROR_LOCATION,
470  "Variable '%s' ('%s') contains numbers that are not finite (NaN or infinity)",
471  variable.get_name().c_str(), variable.get_string("long_name").c_str());
472  }
473 
474  // Check the range and warn the user if needed:
475  variable.check_range(file.filename(), min, max);
476  if (m_impl->report_range) {
477  variable.report_range(*log, min, max, V.found_using_standard_name);
478  }
479  }
480  }
481 
482  set_dof(da2, tmp, j);
483  }
484 
485  // The calls above only set the values owned by a processor, so we need to
486  // communicate if m_impl->ghosted is true:
487  if (m_impl->ghosted) {
488  update_ghosts();
489  }
490 }
491 
492 //! Reads appropriate NetCDF variable(s) into an Array.
493 void Array::read_impl(const File &file, const unsigned int time) {
494 
495  auto log = grid()->ctx()->log();
496  log->message(4, " Reading %s...\n", m_impl->name.c_str());
497 
498  if (ndof() == 1) {
499  // This takes care of scalar variables (both 2D and 3D).
500  if (m_impl->ghosted) {
501  petsc::TemporaryGlobalVec tmp(dm());
502 
503  petsc::VecArray tmp_array(tmp);
504  io::read_spatial_variable(metadata(0), *grid(), file, time, tmp_array.get());
505 
506  global_to_local(*dm(), tmp, vec());
507  } else {
508  petsc::VecArray v_array(vec());
509  io::read_spatial_variable(metadata(0), *grid(), file, time, v_array.get());
510  }
511  return;
512  }
513 
514  // Get the dof=1, stencil_width=0 DMDA (components are always scalar
515  // and we just need a global Vec):
516  auto da2 = grid()->get_dm(1, 0);
517 
518  // A temporary one-component vector, distributed across processors
519  // the same way v is
520  petsc::TemporaryGlobalVec tmp(da2);
521 
522  for (unsigned int j = 0; j < ndof(); ++j) {
523 
524  {
525  petsc::VecArray tmp_array(tmp);
526  io::read_spatial_variable(metadata(j), *grid(), file, time, tmp_array.get());
527  }
528 
529  set_dof(da2, tmp, j);
530  }
531 
532  // The calls above only set the values owned by a processor, so we need to
533  // communicate if m_impl->ghosted is true:
534  if (m_impl->ghosted) {
535  update_ghosts();
536  }
537 }
538 
539 //! \brief Define variables corresponding to an Array in a file opened using `file`.
540 void Array::define(const File &file, io::Type default_type) const {
541  for (unsigned int j = 0; j < ndof(); ++j) {
542  io::Type type = metadata(j).get_output_type();
543  if (type == io::PISM_NAT) {
544  type = default_type;
545  }
546 
547  io::define_spatial_variable(metadata(j), *m_impl->grid, file, type);
548  }
549 }
550 
551 //! @brief Returns a reference to the SpatialVariableMetadata object
552 //! containing metadata for the component N.
553 SpatialVariableMetadata& Array::metadata(unsigned int N) {
554  assert(N < m_impl->dof);
555  return m_impl->metadata[N];
556 }
557 
558 const SpatialVariableMetadata& Array::metadata(unsigned int N) const {
559  assert(N < m_impl->dof);
560  return m_impl->metadata[N];
561 }
562 
563 //! Writes an Array to a NetCDF file.
564 void Array::write_impl(const File &file) const {
565  auto log = m_impl->grid->ctx()->log();
566  auto time = timestamp(m_impl->grid->com);
567 
568  // The simplest case:
569  if (ndof() == 1) {
570  log->message(3, "[%s] Writing %s...\n",
571  time.c_str(), metadata(0).get_name().c_str());
572 
573  if (m_impl->ghosted) {
574  petsc::TemporaryGlobalVec tmp(dm());
575 
576  this->copy_to_vec(dm(), tmp);
577 
578  petsc::VecArray tmp_array(tmp);
579 
580  io::write_spatial_variable(metadata(0), *grid(), file, tmp_array.get());
581  } else {
582  petsc::VecArray v_array(vec());
583  io::write_spatial_variable(metadata(0), *grid(), file, v_array.get());
584  }
585  return;
586  }
587 
588  // Get the dof=1, stencil_width=0 DMDA (components are always scalar
589  // and we just need a global Vec):
590  auto da2 = m_impl->grid->get_dm(1, 0);
591 
592  // a temporary one-component vector, distributed across processors
593  // the same way v is
594  petsc::TemporaryGlobalVec tmp(da2);
595 
596  for (unsigned int j = 0; j < ndof(); ++j) {
597  get_dof(da2, tmp, j);
598 
599  petsc::VecArray tmp_array(tmp);
600  log->message(3, "[%s] Writing %s...\n",
601  time.c_str(), metadata(j).get_name().c_str());
602  io::write_spatial_variable(metadata(j), *grid(), file, tmp_array.get());
603  }
604 }
605 
606 //! Dumps a variable to a file, overwriting this file's contents (for debugging).
607 void Array::dump(const char filename[]) const {
608  File file(m_impl->grid->com, filename,
609  string_to_backend(m_impl->grid->ctx()->config()->get_string("output.format")),
610  io::PISM_READWRITE_CLOBBER,
611  m_impl->grid->ctx()->pio_iosys_id());
612 
613  if (not m_impl->metadata[0].get_time_independent()) {
614  io::define_time(file, *m_impl->grid->ctx());
615  io::append_time(file, *m_impl->grid->ctx()->config(), m_impl->grid->ctx()->time()->current());
616  }
617 
618  define(file, io::PISM_DOUBLE);
619  write(file);
620 }
621 
622 //! Checks if two Arrays have compatible sizes, dimensions and numbers of degrees of freedom.
623 void Array::checkCompatibility(const char* func, const Array &other) const {
624  PetscErrorCode ierr;
625  // We have to use PetscInt because of VecGetSize() below.
626  PetscInt X_size, Y_size;
627 
628  if (m_impl->dof != other.m_impl->dof) {
629  throw RuntimeError::formatted(PISM_ERROR_LOCATION, "Array::%s(...): operands have different numbers of degrees of freedom",
630  func);
631  }
632 
633  ierr = VecGetSize(vec(), &X_size);
634  PISM_CHK(ierr, "VecGetSize");
635 
636  ierr = VecGetSize(other.vec(), &Y_size);
637  PISM_CHK(ierr, "VecGetSize");
638 
639  if (X_size != Y_size) {
640  throw RuntimeError::formatted(PISM_ERROR_LOCATION, "Array::%s(...): incompatible Vec sizes (called as %s.%s(%s))",
641  func, m_impl->name.c_str(), func, other.m_impl->name.c_str());
642  }
643 }
644 
645 //! Checks if an Array is allocated and calls DMDAVecGetArray.
646 void Array::begin_access() const {
647 
648  if (m_impl->access_counter < 0) {
649  throw RuntimeError(PISM_ERROR_LOCATION, "Array::begin_access(): m_access_counter < 0");
650  }
651 
652  if (m_impl->access_counter == 0) {
653  PetscErrorCode ierr;
654  if (m_impl->begin_access_use_dof) {
655  ierr = DMDAVecGetArrayDOF(*dm(), vec(), &m_array);
656  PISM_CHK(ierr, "DMDAVecGetArrayDOF");
657  } else {
658  ierr = DMDAVecGetArray(*dm(), vec(), &m_array);
659  PISM_CHK(ierr, "DMDAVecGetArray");
660  }
661  }
662 
663  m_impl->access_counter += 1;
664 }
665 
666 //! Checks if an Array is allocated and calls DMDAVecRestoreArray.
667 void Array::end_access() const {
668  PetscErrorCode ierr;
669 
670  if (m_array == NULL) {
671  throw RuntimeError(PISM_ERROR_LOCATION,
672  "Array::end_access(): a == NULL (looks like begin_access() was not called)");
673  }
674 
675  if (m_impl->access_counter < 0) {
676  throw RuntimeError(PISM_ERROR_LOCATION, "Array::end_access(): m_access_counter < 0");
677  }
678 
679  m_impl->access_counter -= 1;
680  if (m_impl->access_counter == 0) {
681  if (m_impl->begin_access_use_dof) {
682  ierr = DMDAVecRestoreArrayDOF(*dm(), vec(), &m_array);
683  PISM_CHK(ierr, "DMDAVecRestoreArrayDOF");
684  } else {
685  ierr = DMDAVecRestoreArray(*dm(), vec(), &m_array);
686  PISM_CHK(ierr, "DMDAVecRestoreArray");
687  }
688  m_array = NULL;
689  }
690 }
691 
692 //! Updates ghost points.
693 void Array::update_ghosts() {
694  PetscErrorCode ierr;
695  if (not m_impl->ghosted) {
696  return;
697  }
698 
699  ierr = DMLocalToLocalBegin(*dm(), vec(), INSERT_VALUES, vec());
700  PISM_CHK(ierr, "DMLocalToLocalBegin");
701 
702  ierr = DMLocalToLocalEnd(*dm(), vec(), INSERT_VALUES, vec());
703  PISM_CHK(ierr, "DMLocalToLocalEnd");
704 }
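// Typical pattern (sketch; `thickness` and `dHdt` are hypothetical): update
// owned values, then refresh ghosts before reading neighboring points.
//
//   thickness.add(dt, dHdt);    // pointwise update of owned values
//   thickness.update_ghosts();  // returns immediately for un-ghosted fields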
705 
706 //! Result: v[j] <- c for all j.
707 void Array::set(const double c) {
708  PetscErrorCode ierr = VecSet(vec(),c);
709  PISM_CHK(ierr, "VecSet");
710 
711  inc_state_counter(); // mark as modified
712 }
713 
714 void Array::check_array_indices(int i, int j, unsigned int k) const {
715  double ghost_width = 0;
716  if (m_impl->ghosted) {
717  ghost_width = m_impl->da_stencil_width;
718  }
719  // m_impl->dof > 1 for vector, staggered grid 2D fields, etc. In this case
720  // zlevels.size() == 1. For 3D fields, m_impl->dof == 1 (all 3D fields are
721  // scalar) and zlevels.size() corresponds to dof of the underlying PETSc
722  // DM object. So we want the bigger of the two numbers here.
723  unsigned int N = std::max((size_t)m_impl->dof, m_impl->zlevels.size());
724 
725  bool out_of_range = (i < m_impl->grid->xs() - ghost_width) ||
726  (i > m_impl->grid->xs() + m_impl->grid->xm() + ghost_width) ||
727  (j < m_impl->grid->ys() - ghost_width) ||
728  (j > m_impl->grid->ys() + m_impl->grid->ym() + ghost_width) ||
729  (k >= N);
730 
731  if (out_of_range) {
732  throw RuntimeError::formatted(PISM_ERROR_LOCATION, "%s(%d, %d, %d) is out of bounds",
733  m_impl->name.c_str(), i, j, k);
734  }
735 
736  if (m_array == NULL) {
737  throw RuntimeError::formatted(PISM_ERROR_LOCATION, "%s: begin_access() was not called", m_impl->name.c_str());
738  }
739 }
740 
741 //! Computes the norm of all the components of an Array.
742 /*!
743 See comment for range(); because local Vecs are VECSEQ, needs a reduce operation.
744 See src/trypetsc/localVecMax.c.
745  */
746 std::vector<double> Array::norm(int n) const {
747  std::vector<double> result(m_impl->dof);
748 
749  NormType type = int_to_normtype(n);
750 
751  if (m_impl->dof > 1) {
752  PetscErrorCode ierr = VecStrideNormAll(vec(), type, result.data());
753  PISM_CHK(ierr, "VecStrideNormAll");
754  } else {
755  PetscErrorCode ierr = VecNorm(vec(), type, result.data());
756  PISM_CHK(ierr, "VecNorm");
757  }
758 
759  if (m_impl->ghosted) {
760  // needs a reduce operation; use GlobalMax() if NORM_INFINITY,
761  // otherwise GlobalSum(), taking care in the NORM_2 case
762  switch (type) {
763  case NORM_1_AND_2: {
764  throw RuntimeError::formatted(PISM_ERROR_LOCATION,
765  "Array::norm_all(...): NORM_1_AND_2"
766  " not implemented (called as %s.norm_all(...))",
767  m_impl->name.c_str());
768  }
769  case NORM_1: {
770  for (unsigned int k = 0; k < m_impl->dof; ++k) {
771  result[k] = GlobalSum(m_impl->grid->com, result[k]);
772  }
773  return result;
774  }
775  case NORM_2: {
776  for (unsigned int k = 0; k < m_impl->dof; ++k) {
777  // undo sqrt in VecNorm before sum; sum up; take sqrt
778  result[k] = sqrt(GlobalSum(m_impl->grid->com, result[k] * result[k]));
779  }
780  return result;
781  }
782  case NORM_INFINITY: {
783  for (unsigned int k = 0; k < m_impl->dof; ++k) {
784  result[k] = GlobalMax(m_impl->grid->com, result[k]);
785  }
786  return result;
787  }
788  default: {
789  throw RuntimeError::formatted(PISM_ERROR_LOCATION,
790  "Array::norm_all(...): unknown norm type"
791  " (called as %s.norm_all(...))",
792  m_impl->name.c_str());
793  }
794  } // switch
795  } else {
796  return result;
797  }
798 }
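// Usage sketch (`velocity` is hypothetical; NORM_2 is PETSc's NormType):
// norm() returns one value per degree of freedom.
//
//   std::vector<double> n2 = velocity.norm(NORM_2);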
799 
800 void Array::write(const std::string &filename) const {
801  // We expect the file to be present and ready to write into.
802  File file(m_impl->grid->com, filename,
803  string_to_backend(m_impl->grid->ctx()->config()->get_string("output.format")),
804  io::PISM_READWRITE, m_impl->grid->ctx()->pio_iosys_id());
805 
806  this->write(file);
807 }
808 
809 void Array::read(const std::string &filename, unsigned int time) {
810  File file(m_impl->grid->com, filename, io::PISM_GUESS, io::PISM_READONLY);
811  this->read(file, time);
812 }
813 
814 void Array::regrid(const std::string &filename, io::Default default_value) {
815  File file(m_impl->grid->com, filename, io::PISM_GUESS, io::PISM_READONLY);
816  this->regrid(file, default_value);
817 }
818 
819 /** Read a field from a file, interpolating onto the current grid.
820  *
821  * In the `CRITICAL` regridding mode, stop if the variable could not be
822  * found in the provided input file; `default_value` is ignored.
823  *
824  * In the `OPTIONAL` mode, fill this Array with `default_value` if the
825  * variable could not be found in the provided input file.
826  *
827  * In the `CRITICAL_FILL_MISSING` mode, replace missing values matching
828  * the `_FillValue` attribute with `default_value` and stop if the
829  * variable could not be found.
830  *
831  * In the `OPTIONAL_FILL_MISSING` mode, replace missing values matching
832  * the `_FillValue` attribute with `default_value`; fill the whole
833  * Array with `default_value` if the variable could not be found.
834  *
835  * @param file input file
836  * @param default_value default value; its meaning depends on the
837  * regridding mode (see above)
838  */
844 void Array::regrid(const File &file, io::Default default_value) {
845  m_impl->grid->ctx()->log()->message(3, " [%s] Regridding %s...\n",
846  timestamp(m_impl->grid->com).c_str(), m_impl->name.c_str());
847  double start_time = get_time(m_impl->grid->com);
848  m_impl->grid->ctx()->profiling().begin("io.regridding");
849  try {
850  this->regrid_impl(file, default_value);
851  inc_state_counter(); // mark as modified
852  } catch (RuntimeError &e) {
853  e.add_context("regridding '%s' from '%s'",
854  this->get_name().c_str(), file.filename().c_str());
855  throw;
856  }
857  m_impl->grid->ctx()->profiling().end("io.regridding");
858  double
859  end_time = get_time(m_impl->grid->com),
860  time_spent = end_time - start_time;
861 
862  if (time_spent > 1.0) {
863  m_impl->grid->ctx()->log()->message(3, " done in %f seconds.\n", time_spent);
864  } else {
865  m_impl->grid->ctx()->log()->message(3, " done.\n");
866  }
867 }
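// Usage sketch (hypothetical file name and value; assumes io::Default can be
// constructed from a number, as elsewhere in PISM):
//
//   temperature.regrid("input.nc", io::Default(273.15));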
868 
869 void Array::read(const File &file, const unsigned int time) {
870  this->read_impl(file, time);
871  inc_state_counter(); // mark as modified
872 }
873 
874 void Array::write(const File &file) const {
875  define(file, io::PISM_DOUBLE);
876 
877  MPI_Comm com = m_impl->grid->com;
878  double start_time = get_time(com);
879  write_impl(file);
880  double end_time = get_time(com);
881 
882  const double
883  minute = 60.0, // one minute in seconds
884  time_spent = end_time - start_time,
885  megabyte = pow(2, 20),
886  N = static_cast<double>(size()),
887  mb_double = static_cast<double>(sizeof(double)) * N / megabyte,
888  mb_float = static_cast<double>(sizeof(float)) * N / megabyte;
889 
890  std::string timestamp = pism::timestamp(com);
891  std::string spacer(timestamp.size(), ' ');
892  if (time_spent > 1) {
893  m_impl->grid->ctx()->log()->message(3,
894  "\n"
895  " [%s] Done writing %s (%f Mb double, %f Mb float)\n"
896  " %s in %f seconds (%f minutes).\n"
897  " %s Effective throughput: double: %f Mb/s, float: %f Mb/s.\n",
898  timestamp.c_str(), m_impl->name.c_str(), mb_double, mb_float,
899  spacer.c_str(), time_spent, time_spent / minute,
900  spacer.c_str(),
901  mb_double / time_spent, mb_float / time_spent);
902  } else {
903  m_impl->grid->ctx()->log()->message(3, " done.\n");
904  }
905 }
906 
907 AccessScope::AccessScope() {
908  // empty
909 }
910 
911 AccessScope::~AccessScope() {
912  while (not m_vecs.empty()) {
913  try {
914  m_vecs.back()->end_access();
915  m_vecs.pop_back();
916  } catch (...) {
917  handle_fatal_errors(MPI_COMM_SELF);
918  }
919  }
920 }
921 
922 AccessScope::AccessScope(std::initializer_list<const PetscAccessible *> vecs) {
923  for (const auto *j : vecs) {
924  assert(j != nullptr);
925  add(*j);
926  }
927 }
928 
929 AccessScope::AccessScope(const PetscAccessible &vec) {
930  add(vec);
931 }
932 
933 void AccessScope::add(const PetscAccessible &vec) {
934  vec.begin_access();
935  m_vecs.push_back(&vec);
936 }
937 
938 void AccessScope::add(const std::vector<const PetscAccessible*> &vecs) {
939  for (const auto *v : vecs) {
940  assert(v != nullptr);
941  add(*v);
942  }
943 }
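// RAII sketch (`u` and `v` are hypothetical arrays): AccessScope pairs
// begin_access() with end_access() automatically.
//
//   {
//     AccessScope scope{&u, &v};  // begin_access() on both
//     // ... use the arrays ...
//   }                             // destructor calls end_access()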
944 
945 //! Return the total number of elements in the *owned* part of an array.
946 size_t Array::size() const {
947  // m_impl->dof > 1 for vector, staggered grid 2D fields, etc. In this case
948  // zlevels.size() == 1. For 3D fields, m_impl->dof == 1 (all 3D fields are
949  // scalar) and zlevels.size() corresponds to dof of the underlying PETSc
950  // DM object.
951 
952  size_t
953  Mx = m_impl->grid->Mx(),
954  My = m_impl->grid->My(),
955  Mz = m_impl->zlevels.size(),
956  dof = m_impl->dof;
957 
958  return Mx * My * Mz * dof;
959 }
960 
961 /*! Allocate a copy on processor zero and the scatter needed to move data.
962  */
963 std::shared_ptr<petsc::Vec> Array::allocate_proc0_copy() const {
964  PetscErrorCode ierr;
965  Vec v_proc0 = NULL;
966  Vec result = NULL;
967 
968  ierr = PetscObjectQuery((PetscObject)dm()->get(), "v_proc0", (PetscObject*)&v_proc0);
969  PISM_CHK(ierr, "PetscObjectQuery");
970 
971  if (v_proc0 == NULL) {
972 
973  // natural_work will be destroyed at the end of scope, but it will
974  // only decrement the reference counter incremented by
975  // PetscObjectCompose below.
976  petsc::Vec natural_work;
977  // create a work vector with natural ordering:
978  ierr = DMDACreateNaturalVector(*dm(), natural_work.rawptr());
979  PISM_CHK(ierr, "DMDACreateNaturalVector");
980 
981  // this increments the reference counter of natural_work
982  ierr = PetscObjectCompose((PetscObject)dm()->get(), "natural_work",
983  (PetscObject)((::Vec)natural_work));
984  PISM_CHK(ierr, "PetscObjectCompose");
985 
986  // scatter_to_zero will be destroyed at the end of scope, but it
987  // will only decrement the reference counter incremented by
988  // PetscObjectCompose below.
989  petsc::VecScatter scatter_to_zero;
990 
991  // initialize the scatter to processor 0 and create storage on processor 0
992  ierr = VecScatterCreateToZero(natural_work, scatter_to_zero.rawptr(),
993  &v_proc0);
994  PISM_CHK(ierr, "VecScatterCreateToZero");
995 
996  // this increments the reference counter of scatter_to_zero
997  ierr = PetscObjectCompose((PetscObject)dm()->get(), "scatter_to_zero",
998  (PetscObject)((::VecScatter)scatter_to_zero));
999  PISM_CHK(ierr, "PetscObjectCompose");
1000 
1001  // this increments the reference counter of v_proc0
1002  ierr = PetscObjectCompose((PetscObject)dm()->get(), "v_proc0",
1003  (PetscObject)v_proc0);
1004  PISM_CHK(ierr, "PetscObjectCompose");
1005 
1006  // We DO NOT call VecDestroy(v_proc0): the petsc::Vec wrapper will
1007  // take care of this.
1008  result = v_proc0;
1009  } else {
1010  // We DO NOT call VecDestroy(result): the petsc::Vec wrapper will
1011  // take care of this.
1012  ierr = VecDuplicate(v_proc0, &result);
1013  PISM_CHK(ierr, "VecDuplicate");
1014  }
1015  return std::shared_ptr<petsc::Vec>(new petsc::Vec(result));
1016 }
1017 
1018 void Array::put_on_proc0(petsc::Vec &parallel, petsc::Vec &onp0) const {
1019  PetscErrorCode ierr = 0;
1020  VecScatter scatter_to_zero = NULL;
1021  Vec natural_work = NULL;
1022 
1023  ierr = PetscObjectQuery((PetscObject)dm()->get(), "scatter_to_zero",
1024  (PetscObject*)&scatter_to_zero);
1025  PISM_CHK(ierr, "PetscObjectQuery");
1026 
1027  ierr = PetscObjectQuery((PetscObject)dm()->get(), "natural_work",
1028  (PetscObject*)&natural_work);
1029  PISM_CHK(ierr, "PetscObjectQuery");
1030 
1031  if (natural_work == NULL || scatter_to_zero == NULL) {
1032  throw RuntimeError(PISM_ERROR_LOCATION, "call allocate_proc0_copy() before calling put_on_proc0");
1033  }
1034 
1035  ierr = DMDAGlobalToNaturalBegin(*dm(), parallel, INSERT_VALUES, natural_work);
1036  PISM_CHK(ierr, "DMDAGlobalToNaturalBegin");
1037 
1038  ierr = DMDAGlobalToNaturalEnd(*dm(), parallel, INSERT_VALUES, natural_work);
1039  PISM_CHK(ierr, "DMDAGlobalToNaturalEnd");
1040 
1041  ierr = VecScatterBegin(scatter_to_zero, natural_work, onp0,
1042  INSERT_VALUES, SCATTER_FORWARD);
1043  PISM_CHK(ierr, "VecScatterBegin");
1044 
1045  ierr = VecScatterEnd(scatter_to_zero, natural_work, onp0,
1046  INSERT_VALUES, SCATTER_FORWARD);
1047  PISM_CHK(ierr, "VecScatterEnd");
1048 }
1049 
1050 
1051 //! Puts a local array::Scalar on processor 0.
1052 void Array::put_on_proc0(petsc::Vec &onp0) const {
1053  if (m_impl->ghosted) {
1054  petsc::TemporaryGlobalVec tmp(dm());
1055  this->copy_to_vec(dm(), tmp);
1056  put_on_proc0(tmp, onp0);
1057  } else {
1058  put_on_proc0(vec(), onp0);
1059  }
1060 }
1061 
1062 void Array::get_from_proc0(petsc::Vec &onp0, petsc::Vec &parallel) const {
1063  PetscErrorCode ierr;
1064 
1065  VecScatter scatter_to_zero = NULL;
1066  Vec natural_work = NULL;
1067  ierr = PetscObjectQuery((PetscObject)dm()->get(), "scatter_to_zero",
1068  (PetscObject*)&scatter_to_zero);
1069  PISM_CHK(ierr, "PetscObjectQuery");
1070 
1071  ierr = PetscObjectQuery((PetscObject)dm()->get(), "natural_work",
1072  (PetscObject*)&natural_work);
1073  PISM_CHK(ierr, "PetscObjectQuery");
1074 
1075  if (natural_work == NULL || scatter_to_zero == NULL) {
1076  throw RuntimeError(PISM_ERROR_LOCATION, "call allocate_proc0_copy() before calling get_from_proc0");
1077  }
1078 
1079  ierr = VecScatterBegin(scatter_to_zero, onp0, natural_work,
1080  INSERT_VALUES, SCATTER_REVERSE);
1081  PISM_CHK(ierr, "VecScatterBegin");
1082 
1083  ierr = VecScatterEnd(scatter_to_zero, onp0, natural_work,
1084  INSERT_VALUES, SCATTER_REVERSE);
1085  PISM_CHK(ierr, "VecScatterEnd");
1086 
1087  ierr = DMDANaturalToGlobalBegin(*dm(), natural_work, INSERT_VALUES, parallel);
1088  PISM_CHK(ierr, "DMDANaturalToGlobalBegin");
1089 
1090  ierr = DMDANaturalToGlobalEnd(*dm(), natural_work, INSERT_VALUES, parallel);
1091  PISM_CHK(ierr, "DMDANaturalToGlobalEnd");
1092 }
1093 
1094 //! Gets a local Array from processor 0.
1095 void Array::get_from_proc0(petsc::Vec &onp0) {
1096  if (m_impl->ghosted) {
1097  petsc::TemporaryGlobalVec tmp(dm());
1098  get_from_proc0(onp0, tmp);
1099  global_to_local(*dm(), tmp, vec());
1100  } else {
1101  get_from_proc0(onp0, vec());
1102  }
1103  inc_state_counter(); // mark as modified
1104 }
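// Gather/scatter sketch (`field` is hypothetical): operate on a serial copy
// on rank 0, then push the result back to the parallel layout.
//
//   auto v0 = field.allocate_proc0_copy();
//   field.put_on_proc0(*v0);
//   // ... rank 0 reads or modifies *v0 (e.g. via petsc::VecArray) ...
//   field.get_from_proc0(*v0);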
1105 
1106 /*!
1107  * Copy data to rank 0 and compute the checksum.
1108  *
1109  * Does not use ghosts. Results should be independent of the parallel domain
1110  * decomposition.
1111  */
1112 uint64_t Array::fletcher64_serial() const {
1113 
1114  auto v = allocate_proc0_copy();
1115  put_on_proc0(*v);
1116 
1117  MPI_Comm com = m_impl->grid->ctx()->com();
1118 
1119  int rank = 0;
1120  MPI_Comm_rank(com, &rank);
1121 
1122  uint64_t result = 0;
1123  if (rank == 0) {
1124  petsc::VecArray array(*v);
1125 
1126  PetscInt size = 0;
1127  PetscErrorCode ierr = VecGetLocalSize(*v, &size);
1128  PISM_CHK(ierr, "VecGetLocalSize");
1129 
1130  result = pism::fletcher64((uint32_t*)array.get(), static_cast<size_t>(size) * 2);
1131  }
1132  MPI_Bcast(&result, 1, MPI_UINT64_T, 0, com);
1133 
1134  return result;
1135 }
1136 
1137 /*!
1138  * Compute a checksum of a vector.
1139  *
1140  * The result depends on the number of processors used.
1141  *
1142  * We assume that sizeof(double) == 2 * sizeof(uint32_t), i.e. double uses 64 bits.
1143  */
1144 uint64_t Array::fletcher64() const {
1145  static_assert(sizeof(double) == 2 * sizeof(uint32_t),
1146  "Cannot compile Array::fletcher64() (sizeof(double) != 2 * sizeof(uint32_t))");
1147 
1148  MPI_Status mpi_stat;
1149  const int checksum_tag = 42;
1150 
1151  MPI_Comm com = m_impl->grid->ctx()->com();
1152 
1153  int rank = 0;
1154  MPI_Comm_rank(com, &rank);
1155 
1156  int comm_size = 0;
1157  MPI_Comm_size(com, &comm_size);
1158 
1159  PetscInt local_size = 0;
1160  PetscErrorCode ierr = VecGetLocalSize(vec(), &local_size); PISM_CHK(ierr, "VecGetLocalSize");
1161  uint64_t sum = 0;
1162  {
1163  petsc::VecArray v(vec());
1164  // compute checksums for local patches on all ranks
1165  sum = pism::fletcher64((uint32_t*)v.get(), static_cast<size_t>(local_size) * 2);
1166  }
1167 
1168  if (rank == 0) {
1169  std::vector<uint64_t> sums(comm_size);
1170 
1171  // gather checksums of patches on rank 0
1172  sums[0] = sum;
1173  for (int r = 1; r < comm_size; ++r) {
1174  MPI_Recv(&sums[r], 1, MPI_UINT64_T, r, checksum_tag, com, &mpi_stat);
1175  }
1176 
1177  // compute the checksum of checksums
1178  sum = pism::fletcher64((uint32_t*)sums.data(), static_cast<size_t>(comm_size) * 2);
1179  } else {
1180  MPI_Send(&sum, 1, MPI_UINT64_T, 0, checksum_tag, com);
1181  }
1182 
1183  // broadcast to all ranks
1184  MPI_Bcast(&sum, 1, MPI_UINT64_T, 0, com);
1185 
1186  return sum;
1187 }
1188 
1189 std::string Array::checksum(bool serial) const {
1190  if (serial) {
1191  // unsigned long long is supposed to be at least 64 bit long
1192  return pism::printf("%016llx", (unsigned long long int)this->fletcher64_serial());
1193  }
1194  // unsigned long long is supposed to be at least 64 bit long
1195  return pism::printf("%016llx", (unsigned long long int)this->fletcher64());
1196 }
1197 
1198 void Array::print_checksum(const char *prefix, bool serial) const {
1199  auto log = m_impl->grid->ctx()->log();
1200 
1201  log->message(1, "%s%s: %s\n", prefix, m_impl->name.c_str(), checksum(serial).c_str());
1202 }
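// Debugging sketch (`field` is hypothetical): print a checksum that does not
// depend on the domain decomposition (serial == true uses fletcher64_serial()).
//
//   field.print_checksum("after regrid: ", true);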
1203 
1204 //! \brief View a 2D vector field using existing PETSc viewers.
1205 void Array::view(std::vector<std::shared_ptr<petsc::Viewer> > viewers) const {
1206  PetscErrorCode ierr;
1207 
1208  if (ndims() == 3) {
1209  throw RuntimeError::formatted(PISM_ERROR_LOCATION,
1210  "cannot 'view' a 3D field '%s'",
1211  get_name().c_str());
1212  }
1213 
1214  // Get the dof=1, stencil_width=0 DMDA (components are always scalar
1215  // and we just need a global Vec):
1216  auto da2 = m_impl->grid->get_dm(1, 0);
1217 
1218  petsc::TemporaryGlobalVec tmp(da2);
1219 
1220  for (unsigned int i = 0; i < ndof(); ++i) {
1221  std::string
1222  long_name = m_impl->metadata[i]["long_name"],
1223  units = m_impl->metadata[i]["units"],
1224  output_units = m_impl->metadata[i]["output_units"],
1225  title = pism::printf("%s (%s)",
1226  long_name.c_str(),
1227  output_units.c_str());
1228 
1229  PetscViewer v = *viewers[i].get();
1230 
1231  PetscDraw draw = NULL;
1232  ierr = PetscViewerDrawGetDraw(v, 0, &draw);
1233  PISM_CHK(ierr, "PetscViewerDrawGetDraw");
1234 
1235  ierr = PetscDrawSetTitle(draw, title.c_str());
1236  PISM_CHK(ierr, "PetscDrawSetTitle");
1237 
1238  get_dof(da2, tmp, i);
1239 
1240  convert_vec(tmp, m_impl->metadata[i].unit_system(),
1241  units, output_units);
1242 
1243  double bounds[2] = {0.0, 0.0};
1244  ierr = VecMin(tmp, NULL, &bounds[0]); PISM_CHK(ierr, "VecMin");
1245  ierr = VecMax(tmp, NULL, &bounds[1]); PISM_CHK(ierr, "VecMax");
1246 
1247  if (bounds[0] == bounds[1]) {
1248  bounds[0] = -1.0;
1249  bounds[1] = 1.0;
1250  }
1251 
1252  ierr = PetscViewerDrawSetBounds(v, 1, bounds);
1253  PISM_CHK(ierr, "PetscViewerDrawSetBounds");
1254 
1255  ierr = VecView(tmp, v);
1256  PISM_CHK(ierr, "VecView");
1257  }
1258 }
1259 
1260 } // end of namespace array
1261 
1262 void convert_vec(petsc::Vec &v, units::System::Ptr system,
1263  const std::string &spec1, const std::string &spec2) {
1264  units::Converter c(system, spec1, spec2);
1265 
1266  // has to be a PetscInt because of the VecGetLocalSize() call
1267  PetscInt data_size = 0;
1268  PetscErrorCode ierr = VecGetLocalSize(v, &data_size);
1269  PISM_CHK(ierr, "VecGetLocalSize");
1270 
1271  petsc::VecArray data(v);
1272  c.convert_doubles(data.get(), data_size);
1273 }
1274 
1275 } // end of namespace pism