PISM, A Parallel Ice Sheet Model 2.3.0-79cae578d committed by Constantine Khrulev on 2026-03-22
Loading...
Searching...
No Matches
Array.cc
Go to the documentation of this file.
1// Copyright (C) 2008--2026 Ed Bueler, Constantine Khroulev, and David Maxwell
2//
3// This file is part of PISM.
4//
5// PISM is free software; you can redistribute it and/or modify it under the
6// terms of the GNU General Public License as published by the Free Software
7// Foundation; either version 3 of the License, or (at your option) any later
8// version.
9//
10// PISM is distributed in the hope that it will be useful, but WITHOUT ANY
11// WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
12// FOR A PARTICULAR PURPOSE. See the GNU General Public License for more
13// details.
14//
15// You should have received a copy of the GNU General Public License
16// along with PISM; if not, write to the Free Software
17// Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA
18
19#include <cassert>
20
21#include <cmath>
22#include <cstddef>
23#include <memory>
24#include <petscdraw.h>
25#include <string>
26#include <vector>
27
28#include "pism/util/array/Array.hh"
29#include "pism/util/array/Array_impl.hh"
30
31#include "pism/util/Config.hh"
32#include "pism/util/Grid.hh"
33#include "pism/util/Time.hh"
34
35#include "pism/util/Context.hh"
36#include "pism/util/Logger.hh"
37#include "pism/util/Profiling.hh"
38#include "pism/util/VariableMetadata.hh"
39#include "pism/util/error_handling.hh"
40#include "pism/util/io/File.hh"
41#include "pism/util/io/IO_Flags.hh"
42#include "pism/util/io/io_helpers.hh"
43#include "pism/util/petscwrappers/VecScatter.hh"
44#include "pism/util/petscwrappers/DM.hh"
45#include "pism/util/petscwrappers/Viewer.hh"
46#include "pism/util/pism_utilities.hh"
47
48#include "pism/util/InputInterpolation.hh"
49#include "pism/util/io/OutputWriter.hh"
50#include "pism/util/io/SynchronousOutputWriter.hh"
51
52namespace pism {
53
54namespace array {
55
56void global_to_local(petsc::DM &dm, Vec source, Vec destination) {
57 PetscErrorCode ierr;
58
59 ierr = DMGlobalToLocalBegin(dm, source, INSERT_VALUES, destination);
60 PISM_CHK(ierr, "DMGlobalToLocalBegin");
61
62 ierr = DMGlobalToLocalEnd(dm, source, INSERT_VALUES, destination);
63 PISM_CHK(ierr, "DMGlobalToLocalEnd");
64}
65
66Array::Array(std::shared_ptr<const Grid> grid, const std::string &name, Kind ghostedp, size_t dof,
67 size_t stencil_width, const std::vector<double> &zlevels) {
68 m_impl = new Impl();
69 m_array = nullptr;
70
71 m_impl->name = name;
72 m_impl->grid = grid;
73 m_impl->ghosted = (ghostedp == WITH_GHOSTS);
74 m_impl->dof = dof;
75 m_impl->zlevels = zlevels;
76
77 const auto &config = grid->ctx()->config();
78
79 auto max_stencil_width = static_cast<size_t>(config->get_number("grid.max_stencil_width"));
80 if ((dof != 1) or (stencil_width > max_stencil_width)) {
81 // use the requested stencil width *if* we have to
83 } else {
84 // otherwise use the "standard" stencil width
85 m_impl->da_stencil_width = max_stencil_width;
86 }
87
88 auto system = m_impl->grid->ctx()->unit_system();
89 if (m_impl->dof > 1) {
90 // dof > 1: this is a 2D vector
91 for (unsigned int j = 0; j < m_impl->dof; ++j) {
92 m_impl->metadata.push_back({ system, pism::printf("%s[%d]", name.c_str(), j), *grid });
93 }
94 } else {
95 // both 2D and 3D vectors
96 m_impl->metadata = { { system, name, *grid, zlevels } };
97 }
98
99 if (zlevels.size() > 1) {
100 m_impl->bsearch_accel = gsl_interp_accel_alloc();
101 if (m_impl->bsearch_accel == NULL) {
102 throw RuntimeError(PISM_ERROR_LOCATION, "Failed to allocate a GSL interpolation accelerator");
103 }
104 }
105}
106
108 assert(m_impl->access_counter == 0);
109
110 if (m_impl->bsearch_accel != nullptr) {
111 gsl_interp_accel_free(m_impl->bsearch_accel);
112 m_impl->bsearch_accel = nullptr;
113 }
114
115 delete m_impl;
116 m_impl = nullptr;
117}
118
119//! \brief Get the object state counter.
120/*!
121 * This method returns the "revision number" of an Array.
122 *
123 * It can be used to determine it a field was updated and if a certain
124 * computation needs to be re-done. One example is computing the smoothed bed
125 * for the SIA computation, which is only necessary if the bed deformation code
126 * fired.
127 *
128 * See also inc_state_counter().
129 */
131 return m_impl->state_counter;
132}
133
134std::shared_ptr<const Grid> Array::grid() const {
135 return m_impl->grid;
136}
137
138unsigned int Array::ndof() const {
139 return m_impl->dof;
140}
141
142const std::vector<double> &Array::levels() const {
143 return m_impl->zlevels;
144}
145
146//! \brief Increment the object state counter.
147/*!
148 * See the documentation of get_state_counter(). This method is the
149 * *only* way to manually increment the state counter. It is also
150 * automatically updated by Array methods that are known to
151 * change stored values.
152 */
156
157//! Returns the number of spatial dimensions.
158unsigned int Array::ndims() const {
159 return levels().empty() ? 2 : 3;
160}
161
162std::vector<int> Array::shape() const {
163
164 auto grid = m_impl->grid;
165
166 if (ndims() == 3) {
167 return { (int)grid->My(), (int)grid->Mx(), (int)levels().size() };
168 }
169
170 if (ndof() == 1) {
171 return { (int)grid->My(), (int)grid->Mx() };
172 }
173
174 return { (int)grid->My(), (int)grid->Mx(), (int)ndof() };
175}
176
180
182 if (not(type == LINEAR or type == NEAREST)) {
183 throw RuntimeError::formatted(PISM_ERROR_LOCATION, "invalid interpolation type: %d", (int)type);
184 }
186}
187
188/** Convert from `int` to PETSc's `NormType`.
189 *
190 *
191 * @param[in] input norm type as an integer
192 *
193 * @return norm type as PETSc's `NormType`.
194 */
195static NormType int_to_normtype(int input) {
196 switch (input) {
197 case NORM_1:
198 return NORM_1;
199 case NORM_2:
200 return NORM_2;
201 case NORM_INFINITY:
202 return NORM_INFINITY;
203 default:
204 throw RuntimeError::formatted(PISM_ERROR_LOCATION, "invalid norm type: %d", input);
205 }
206}
207
208//! Result: v <- v + alpha * x. Calls VecAXPY.
209void Array::add(double alpha, const Array &x) {
210 checkCompatibility("add", x);
211
212 PetscErrorCode ierr = VecAXPY(vec(), alpha, x.vec());
213 PISM_CHK(ierr, "VecAXPY");
214
215 inc_state_counter(); // mark as modified
216}
217
218//! Result: v[j] <- v[j] + alpha for all j. Calls VecShift.
219void Array::shift(double alpha) {
220 PetscErrorCode ierr = VecShift(vec(), alpha);
221 PISM_CHK(ierr, "VecShift");
222
223 inc_state_counter(); // mark as modified
224}
225
226//! Result: v <- v * alpha. Calls VecScale.
227void Array::scale(double alpha) {
228 PetscErrorCode ierr = VecScale(vec(), alpha);
229 PISM_CHK(ierr, "VecScale");
230
231 inc_state_counter(); // mark as modified
232}
233
234//! Copies v to a global vector 'destination'. Ghost points are discarded.
235/*! This is potentially dangerous: make sure that `destination` has the same
236 dimensions as the current Array.
237
238 DMLocalToGlobalBegin/End is broken in PETSc 3.5, so we roll our
239 own.
240 */
241void Array::copy_to_vec(std::shared_ptr<petsc::DM> destination_da, petsc::Vec &destination) const {
242 // m_dof > 1 for vector, staggered grid 2D fields, etc. In this case
243 // zlevels.size() == 0. For 3D fields, m_dof == 1 (all 3D fields are
244 // scalar) and zlevels.size() corresponds to dof of the underlying PETSc
245 // DM object. So we want the bigger of the two numbers here.
246 unsigned int N = std::max((size_t)ndof(), levels().size());
247
248 this->get_dof(destination_da, destination, 0, N);
249}
250
251void Array::get_dof(std::shared_ptr<petsc::DM> da_result, petsc::Vec &result, unsigned int start,
252 unsigned int count) const {
253 if (start >= m_impl->dof) {
254 throw RuntimeError::formatted(PISM_ERROR_LOCATION, "invalid argument (start); got %d", start);
255 }
256
257 petsc::DMDAVecArrayDOF tmp_res(da_result, result), tmp_v(dm(), vec());
258
259 double ***result_a = static_cast<double ***>(tmp_res.get()),
260 ***source_a = static_cast<double ***>(tmp_v.get());
261
262 ParallelSection loop(m_impl->grid->com);
263 try {
264 for (auto p : m_impl->grid->points()) {
265 const int i = p.i(), j = p.j();
266 PetscErrorCode ierr =
267 PetscMemcpy(result_a[j][i], &source_a[j][i][start], count * sizeof(PetscScalar));
268 PISM_CHK(ierr, "PetscMemcpy");
269 }
270 } catch (...) {
271 loop.failed();
272 }
273 loop.check();
274}
275
276void Array::set_dof(std::shared_ptr<petsc::DM> da_source, petsc::Vec &source, unsigned int start,
277 unsigned int count) {
278 if (start >= m_impl->dof) {
279 throw RuntimeError::formatted(PISM_ERROR_LOCATION, "invalid argument (start); got %d", start);
280 }
281
282 petsc::DMDAVecArrayDOF tmp_src(da_source, source), tmp_v(dm(), vec());
283
284 double ***source_a = static_cast<double ***>(tmp_src.get()),
285 ***result_a = static_cast<double ***>(tmp_v.get());
286
287 ParallelSection loop(m_impl->grid->com);
288 try {
289 for (auto p : m_impl->grid->points()) {
290 const int i = p.i(), j = p.j();
291 PetscErrorCode ierr =
292 PetscMemcpy(&result_a[j][i][start], source_a[j][i], count * sizeof(PetscScalar));
293 PISM_CHK(ierr, "PetscMemcpy");
294 }
295 } catch (...) {
296 loop.failed();
297 }
298 loop.check();
299
300 inc_state_counter(); // mark as modified
301}
302
303//! @brief Get the stencil width of the current Array. Returns 0
304//! if ghosts are not available.
305unsigned int Array::stencil_width() const {
306 if (m_impl->ghosted) {
307 return m_impl->da_stencil_width;
308 }
309
310 return 0;
311}
312
314 if (m_impl->v.get() == nullptr) {
315 PetscErrorCode ierr = 0;
316 if (m_impl->ghosted) {
317 ierr = DMCreateLocalVector(*dm(), m_impl->v.rawptr());
318 PISM_CHK(ierr, "DMCreateLocalVector");
319 } else {
320 ierr = DMCreateGlobalVector(*dm(), m_impl->v.rawptr());
321 PISM_CHK(ierr, "DMCreateGlobalVector");
322 }
323 }
324 return m_impl->v;
325}
326
327std::shared_ptr<petsc::DM> Array::dm() const {
328 if (m_impl->da == nullptr) {
329 // dof > 1 for vector, staggered grid 2D fields, etc. In this case levels().size() ==
330 // 0. For 3D fields, dof == 1 (all 3D fields are scalar) and levels().size()
331 // corresponds to dof of the underlying PETSc DM object.
332 auto da_dof = std::max(levels().size(), (size_t)ndof());
333
334 // initialize the da member:
335 m_impl->da = grid()->get_dm(da_dof, m_impl->da_stencil_width);
336 }
337 return m_impl->da;
338}
339
340//! Sets the variable name to `name`.
341/**
342 * This is the "overall" name of a field. This is **not** the same as the
343 * NetCDF variable name. Use `metadata(...).set_name(...)` to set that.
344 */
345void Array::set_name(const std::string &name) {
346 m_impl->name = name;
347}
348
349//! @brief Get the name of an Array object.
350/**
351 * This is the name used to refer to this object in PISM (e.g. via the
352 * Vars class), **not** the name of the corresponding NetCDF variable.
353 * (The problem is that one Array instance may correspond to
354 * several NetCDF variables. Use metadata(...).get_name() to get the
355 * name of NetCDF variables an Array is saved to.)
356 */
357const std::string &Array::get_name() const {
358 return m_impl->name;
359}
360
361void set_default_value_or_stop(const VariableMetadata &variable, io::Default default_value,
362 const Logger &log, Vec output) {
363
364 if (default_value.exists()) {
365 // If it is optional, fill with the provided default value.
366 log.message(2, " variable %s (%s) is not found: using the default constant\n",
367 variable.get_name().c_str(), variable.get_string("long_name").c_str());
368
369 PetscErrorCode ierr = VecSet(output, default_value);
370 PISM_CHK(ierr, "VecSet");
371 } else {
372 // if it's critical, print an error message and stop
373 throw RuntimeError::formatted(PISM_ERROR_LOCATION, "Can't find variable '%s'",
374 variable.get_name().c_str());
375 }
376}
377
378//! Gets an Array from a file `file`, interpolating onto the current grid.
379/*! Stops if the variable was not found and `critical` == true.
380 */
381void Array::regrid_impl(const File &file, io::Default default_value) {
382
383 assert(ndims() == 2);
384
385 auto log = grid()->ctx()->log();
386
387 // Get the dof=1, stencil_width=0 DMDA (components are always scalar
388 // and we just need a global Vec):
389 auto dm_for_io = ndof() == 1 ? dm() : grid()->get_dm(1, 0);
390
391 // a temporary one-component vector that uses a compatible domain decomposition
392 petsc::TemporaryGlobalVec tmp(dm_for_io);
393
394 for (unsigned int j = 0; j < ndof(); ++j) {
395 auto variable = metadata(j);
396
397 auto V = file.find_variable(variable.get_name(), variable["standard_name"]);
398
399 if (V.exists) {
400 auto interp = grid()->get_interpolation(levels(), file, V.name, m_impl->interpolation_type);
401
402 int last_record = -1;
403 interp->regrid(variable, file, last_record, *grid(), tmp);
404 } else {
405 set_default_value_or_stop(variable, default_value, *log, tmp);
406 }
407
408 set_dof(dm_for_io, tmp, j);
409 } // end of the loop over degrees of freedom
410
411 // The calls above only set the values owned by a processor, so we need to
412 // communicate if m_has_ghosts == true:
413 if (m_impl->ghosted) {
415 }
416}
417
418//! Reads appropriate NetCDF variable(s) into an Array.
419void Array::read_impl(const File &file, const unsigned int time) {
420
421 auto log = grid()->ctx()->log();
422 log->message(4, " Reading %s...\n", m_impl->name.c_str());
423
424 if (ndof() == 1) {
425 // This takes are of scalar variables (both 2D and 3D).
426 if (m_impl->ghosted) {
428
429 petsc::VecArray tmp_array(tmp);
430 io::read_spatial_variable(metadata(0), *grid(), file, time, tmp_array.get());
431
432 global_to_local(*dm(), tmp, vec());
433 } else {
434 petsc::VecArray v_array(vec());
435 io::read_spatial_variable(metadata(0), *grid(), file, time, v_array.get());
436 }
437 return;
438 }
439
440 // Get the dof=1, stencil_width=0 DMDA (components are always scalar
441 // and we just need a global Vec):
442 auto da2 = grid()->get_dm(1, 0);
443
444 // A temporary one-component vector, distributed across processors
445 // the same way v is
447
448 for (unsigned int j = 0; j < ndof(); ++j) {
449
450 {
451 petsc::VecArray tmp_array(tmp);
452 io::read_spatial_variable(metadata(j), *grid(), file, time, tmp_array.get());
453 }
454
455 set_dof(da2, tmp, j);
456 }
457
458 // The calls above only set the values owned by a processor, so we need to
459 // communicate if m_impl->ghosted is true:
460 if (m_impl->ghosted) {
462 }
463}
464
465//! @brief Returns a reference to the VariableMetadata object
466//! containing metadata for the compoment N.
468 assert(N < m_impl->dof);
469 return m_impl->metadata[N];
470}
471
472const VariableMetadata &Array::metadata(unsigned int N) const {
473 assert(N < m_impl->dof);
474 return m_impl->metadata[N];
475}
476
477std::vector<VariableMetadata> Array::all_metadata() const {
478 return m_impl->metadata;
479}
480
481//! Writes an Array to a NetCDF file.
482void Array::write_impl(const OutputFile &file) const {
483 auto log = grid()->ctx()->log();
484 auto time = timestamp(m_impl->grid->com);
485
486 // get the unit converter from internal to output units
487 auto unit_converter = [this](unsigned int j) {
488 auto ctx = grid()->ctx();
489 if (ctx->config()->get_flag("output.use_MKS")) {
490 // use internal units
491 return std::make_shared<units::Converter>();
492 }
493
494 std::string units = metadata(j)["units"];
495 std::string output_units = metadata(j)["output_units"];
496
497 bool use_output_units =
498 (not units.empty() and not output_units.empty() and units != output_units);
499
500 if (use_output_units) {
501 return std::make_shared<units::Converter>(ctx->unit_system(), units, output_units);
502 }
503
504 return std::make_shared<units::Converter>();
505 };
506
507 // 3D arrays have one or more levels, collections of fields have 0 levels.
508 size_t n_levels = levels().empty() ? 1 : levels().size();
509 size_t local_array_size = grid()->xm() * grid()->ym() * n_levels;
510
511 // Scalar 2D and 3D arrays:
512 if (ndof() == 1) {
513 log->message(3, "[%s] Writing %s...\n", time.c_str(), metadata(0).get_name().c_str());
514
516
517 this->copy_to_vec(dm(), tmp);
518
519 petsc::VecArray tmp_array(tmp);
520
521 unit_converter(0)->convert_doubles(tmp_array.get(), local_array_size);
522
523 file.write_distributed_array(metadata(0).get_name(), tmp_array.get());
524
525 return;
526 }
527
528 // 2D arrays with more than one degree of freedom (vectors, scalars on the staggered
529 // grid, collections of scalars)
530 {
531 // Get the dof=1, stencil_width=0 DMDA (components are always scalar
532 // and we just need a global Vec):
533 auto da2 = grid()->get_dm(1, 0);
534
535 // a temporary one-component vector, distributed across processors
536 // the same way v is
538
539 for (unsigned int j = 0; j < ndof(); ++j) {
540 get_dof(da2, tmp, j);
541
542 petsc::VecArray tmp_array(tmp);
543
544 log->message(3, "[%s] Writing %s...\n", time.c_str(), metadata(j).get_name().c_str());
545
546 unit_converter(j)->convert_doubles(tmp_array.get(), local_array_size);
547
548 file.write_distributed_array(metadata(j).get_name(), tmp_array.get());
549 }
550 }
551}
552
553//! Dumps a variable to a file, overwriting this file's contents (for debugging).
554void Array::dump(const char filename[]) const {
555 auto ctx = grid()->ctx();
556 auto writer = std::make_shared<SynchronousOutputWriter>(ctx->com(), *ctx->config());
557 writer->initialize({}, true);
558
559 OutputFile file(writer, filename);
560
561 if (metadata(0).get_time_dependent()) {
562 auto time = ctx->time();
563 file.define_variable(time->metadata());
564 file.append_time(time->current());
565 }
566
567 for (unsigned int k = 0; k < ndof(); ++k) {
569 }
570
571 write(file);
572}
573
574//! Checks if two Arrays have compatible sizes, dimensions and numbers of degrees of freedom.
575void Array::checkCompatibility(const char* func, const Array &other) const {
576 PetscErrorCode ierr;
577 // We have to use PetscInt because of VecGetSizes below.
578 PetscInt X_size, Y_size;
579
580 if (m_impl->dof != other.m_impl->dof) {
581 throw RuntimeError::formatted(PISM_ERROR_LOCATION, "Array::%s(...): operands have different numbers of degrees of freedom",
582 func);
583 }
584
585 ierr = VecGetSize(vec(), &X_size);
586 PISM_CHK(ierr, "VecGetSize");
587
588 ierr = VecGetSize(other.vec(), &Y_size);
589 PISM_CHK(ierr, "VecGetSize");
590
591 if (X_size != Y_size) {
592 throw RuntimeError::formatted(PISM_ERROR_LOCATION, "Array::%s(...): incompatible Vec sizes (called as %s.%s(%s))",
593 func, m_impl->name.c_str(), func, other.m_impl->name.c_str());
594 }
595}
596
597//! Checks if an Array is allocated and calls DAVecGetArray.
// NOTE(review): this listing has extraction gaps. Original lines 598 (the
// `Array::begin_access() const` signature), 606 (the condition that selects
// the DOF accessor branch), and 615 (presumably the access-counter
// increment) are missing. Do not compile as-is; restore these lines from
// the upstream Array.cc before use.
599
600 if (m_impl->access_counter < 0) {
601 throw RuntimeError(PISM_ERROR_LOCATION, "Array::begin_access(): m_access_counter < 0");
602 }
603
604 if (m_impl->access_counter == 0) {
605 PetscErrorCode ierr;
// (missing line 606: the `if (...) {` choosing DMDAVecGetArrayDOF over
// DMDAVecGetArray -- the criterion cannot be recovered from this listing)
607 ierr = DMDAVecGetArrayDOF(*dm(), vec(), &m_array);
608 PISM_CHK(ierr, "DMDAVecGetArrayDOF");
609 } else {
610 ierr = DMDAVecGetArray(*dm(), vec(), &m_array);
611 PISM_CHK(ierr, "DMDAVecGetArray");
612 }
613 }
614
// (missing line 615: presumably `m_impl->access_counter += 1;`, matching the
// balanced-access assertion in the destructor)
616}
617
618//! Checks if an Array is allocated and calls DAVecRestoreArray.
// NOTE(review): this listing has extraction gaps. Original lines 623 (the
// first line of the throw statement), 631 (presumably the access-counter
// decrement), and 633 (the condition selecting the DOF restore branch,
// which must mirror the one in begin_access()) are missing. Restore them
// from the upstream Array.cc before use.
619void Array::end_access() const {
620 PetscErrorCode ierr;
621
622 if (m_array == NULL) {
// (missing line 623: `throw RuntimeError(PISM_ERROR_LOCATION,`)
624 "Array::end_access(): a == NULL (looks like begin_access() was not called)");
625 }
626
627 if (m_impl->access_counter < 0) {
628 throw RuntimeError(PISM_ERROR_LOCATION, "Array::end_access(): m_access_counter < 0");
629 }
630
// (missing line 631: presumably `m_impl->access_counter -= 1;`)
632 if (m_impl->access_counter == 0) {
// (missing line 633: the `if (...) {` mirroring begin_access()'s DOF branch)
634 ierr = DMDAVecRestoreArrayDOF(*dm(), vec(), &m_array);
635 PISM_CHK(ierr, "DMDAVecRestoreArrayDOF");
636 } else {
637 ierr = DMDAVecRestoreArray(*dm(), vec(), &m_array);
638 PISM_CHK(ierr, "DMDAVecRestoreArray");
639 }
640 m_array = NULL;
641 }
642}
643
644//! Updates ghost points.
646 PetscErrorCode ierr;
647 if (not m_impl->ghosted) {
648 return;
649 }
650
651 ierr = DMLocalToLocalBegin(*dm(), vec(), INSERT_VALUES, vec());
652 PISM_CHK(ierr, "DMLocalToLocalBegin");
653
654 ierr = DMLocalToLocalEnd(*dm(), vec(), INSERT_VALUES, vec());
655 PISM_CHK(ierr, "DMLocalToLocalEnd");
656}
657
658//! Result: v[j] <- c for all j.
659void Array::set(const double c) {
660 PetscErrorCode ierr = VecSet(vec(),c);
661 PISM_CHK(ierr, "VecSet");
662
663 inc_state_counter(); // mark as modified
664}
665
666void Array::check_array_indices(int i, int j, unsigned int k) const {
667 double ghost_width = 0;
668 if (m_impl->ghosted) {
669 ghost_width = m_impl->da_stencil_width;
670 }
671 // m_impl->dof > 1 for vector, staggered grid 2D fields, etc. In this case
672 // levels().size() == 0. For 3D fields, ndof() == 1 (all 3D fields are
673 // scalar) and levels().size() corresponds to dof of the underlying PETSc
674 // DM object. So we want the bigger of the two numbers here.
675 unsigned int N = std::max((size_t)ndof(), levels().size());
676
677 bool out_of_range = (i < m_impl->grid->xs() - ghost_width) ||
678 (i > m_impl->grid->xs() + m_impl->grid->xm() + ghost_width) ||
679 (j < m_impl->grid->ys() - ghost_width) ||
680 (j > m_impl->grid->ys() + m_impl->grid->ym() + ghost_width) ||
681 (k >= N);
682
683 if (out_of_range) {
684 throw RuntimeError::formatted(PISM_ERROR_LOCATION, "%s(%d, %d, %d) is out of bounds",
685 m_impl->name.c_str(), i, j, k);
686 }
687
688 if (m_array == NULL) {
689 throw RuntimeError::formatted(PISM_ERROR_LOCATION, "%s: begin_access() was not called",
690 m_impl->name.c_str());
691 }
692}
693
694//! Computes the norm of all the components of an Array.
695/*!
696See comment for range(); because local Vecs are VECSEQ, needs a reduce operation.
697See src/trypetsc/localVecMax.c.
698 */
699std::vector<double> Array::norm(int n) const {
700 std::vector<double> result(m_impl->dof);
701
702 NormType type = int_to_normtype(n);
703
704 if (m_impl->dof > 1) {
705 PetscErrorCode ierr = VecStrideNormAll(vec(), type, result.data());
706 PISM_CHK(ierr, "VecStrideNormAll");
707 } else {
708 PetscErrorCode ierr = VecNorm(vec(), type, result.data());
709 PISM_CHK(ierr, "VecNorm");
710 }
711
712 if (m_impl->ghosted) {
713 // needs a reduce operation; use GlobalMax() if NORM_INFINITY,
714 // otherwise GlobalSum; carefully in NORM_2 case
715 switch (type) {
716 case NORM_1_AND_2: {
718 "Array::norm_all(...): NORM_1_AND_2"
719 " not implemented (called as %s.norm_all(...))",
720 m_impl->name.c_str());
721 }
722 case NORM_1: {
723 for (unsigned int k = 0; k < m_impl->dof; ++k) {
724 result[k] = GlobalSum(m_impl->grid->com, result[k]);
725 }
726 return result;
727 }
728 case NORM_2: {
729 for (unsigned int k = 0; k < m_impl->dof; ++k) {
730 // undo sqrt in VecNorm before sum; sum up; take sqrt
731 result[k] = sqrt(GlobalSum(m_impl->grid->com, result[k] * result[k]));
732 }
733 return result;
734 }
735 case NORM_INFINITY: {
736 for (unsigned int k = 0; k < m_impl->dof; ++k) {
737 result[k] = GlobalMax(m_impl->grid->com, result[k]);
738 }
739 return result;
740 }
741 default: {
743 "Array::norm_all(...): unknown norm type"
744 " (called as %s.norm_all(...))",
745 m_impl->name.c_str());
746 }
747 } // switch
748 } else {
749 return result;
750 }
751}
752
753void Array::read(const std::string &filename, unsigned int time) {
754 File file(m_impl->grid->com, filename, io::PISM_GUESS, io::PISM_READONLY);
755 this->read(file, time);
756}
757
758void Array::regrid(const std::string &filename, io::Default default_value) {
759 File file(m_impl->grid->com, filename, io::PISM_GUESS, io::PISM_READONLY);
760 this->regrid(file, default_value);
761}
762
763static void check_range(petsc::Vec &v,
765 const std::string &filename,
766 const Logger &log,
767 bool report_range) {
768 PetscErrorCode ierr = 0;
769
770 double min = 0.0;
771 double max = 0.0;
772
773 ierr = VecMin(v, NULL, &min);
774 PISM_CHK(ierr, "VecMin");
775 ierr = VecMax(v, NULL, &max);
776 PISM_CHK(ierr, "VecMax");
777
778 // Check the range and warn the user if needed:
779 metadata.check_range(filename, min, max);
780 if (report_range) {
781 metadata.report_range(log, min, max);
782 }
783}
784
785
786/** Read a field from a file, interpolating onto the current grid.
787 *
788 * When `flag` is set to `CRITICAL`, stop if could not find the variable
789 * in the provided input file; `default_value` is ignored.
790 *
791 * When `flag` is set to `OPTIONAL`, fill this Array with
792 * `default_value` if could not find the variable in the provided
793 * input file.
794 *
795 * When `flag` is set to `CRITICAL_FILL_MISSING`, replace missing
796 * values matching the `_FillValue` attribute with `default_value`,
797 * stop if could not find the variable.
798 *
799 * When `flag` is set to `OPTIONAL_FILL_MISSING`, replace missing
800 * values matching the `_FillValue` attribute with `default_value`;
801 * fill the whole Array with `default_value` if could not find
802 * the variable.
803 *
804 * @param file input file
805 * @param flag regridding mode, see above
806 * @param default_value default value, meaning depends on the
807 * regridding mode flag
808 *
809 * @return 0 on success
810 */
811void Array::regrid(const File &file, io::Default default_value) {
812 auto log = m_impl->grid->ctx()->log();
813
814 log->message(3, " [%s] Regridding %s...\n", timestamp(m_impl->grid->com).c_str(),
815 m_impl->name.c_str());
816 double start_time = get_time(m_impl->grid->com);
817 m_impl->grid->ctx()->profiling().begin("io.regridding");
818 try {
819 this->regrid_impl(file, default_value);
820 inc_state_counter(); // mark as modified
821
822 if (ndims() == 2) {
823 for (unsigned k = 0; k < ndof(); ++k) {
824 auto dm = grid()->get_dm(1, 0);
825
827
828 get_dof(dm, v, k);
829
830 check_range(v, metadata(k), file.name(), *log, m_impl->report_range);
831 }
832 } else {
833 check_range(vec(), metadata(0), file.name(), *log, m_impl->report_range);
834 }
835
836
837 } catch (RuntimeError &e) {
838 e.add_context("regridding '%s' from '%s'",
839 this->get_name().c_str(), file.name().c_str());
840 throw;
841 }
842 m_impl->grid->ctx()->profiling().end("io.regridding");
843 double
844 end_time = get_time(m_impl->grid->com),
845 time_spent = end_time - start_time;
846
847 if (time_spent > 1.0) {
848 log->message(3, " done in %f seconds.\n", time_spent);
849 } else {
850 log->message(3, " done.\n");
851 }
852}
853
854void Array::read(const File &file, const unsigned int time) {
855 this->read_impl(file, time);
856 inc_state_counter(); // mark as modified
857}
858
859void Array::write(const OutputFile &file) const {
860
861 MPI_Comm com = m_impl->grid->com;
862 double start_time = get_time(com);
863 write_impl(file);
864 double end_time = get_time(com);
865
866 const double
867 minute = 60.0, // one minute in seconds
868 time_spent = end_time - start_time,
869 megabyte = pow(2, 20),
870 N = static_cast<double>(size()),
871 mb_double = static_cast<double>(sizeof(double)) * N / megabyte,
872 mb_float = static_cast<double>(sizeof(float)) * N / megabyte;
873
874 std::string timestamp = pism::timestamp(com);
875 std::string spacer(timestamp.size(), ' ');
876 if (time_spent > 1) {
877 m_impl->grid->ctx()->log()->message(3,
878 "\n"
879 " [%s] Done writing %s (%f Mb double, %f Mb float)\n"
880 " %s in %f seconds (%f minutes).\n"
881 " %s Effective throughput: double: %f Mb/s, float: %f Mb/s.\n",
882 timestamp.c_str(), m_impl->name.c_str(), mb_double, mb_float,
883 spacer.c_str(), time_spent, time_spent / minute,
884 spacer.c_str(),
885 mb_double / time_spent, mb_float / time_spent);
886 } else {
887 m_impl->grid->ctx()->log()->message(3, " done.\n");
888 }
889}
890
892 // empty
893}
894
896 while (not m_vecs.empty()) {
897 try {
898 m_vecs.back()->end_access();
899 m_vecs.pop_back();
900 } catch (...) {
901 handle_fatal_errors(MPI_COMM_SELF);
902 }
903 }
904}
905
906AccessScope::AccessScope(std::initializer_list<const PetscAccessible *> vecs) {
907 for (const auto *j : vecs) {
908 assert(j != nullptr);
909 add(*j);
910 }
911}
912
914 add(vec);
915}
916
918 vec.begin_access();
919 m_vecs.push_back(&vec);
920}
921
922void AccessScope::add(const std::vector<const PetscAccessible*> &vecs) {
923 for (const auto *v : vecs) {
924 assert(v != nullptr);
925 add(*v);
926 }
927}
928
929//! Return the total number of elements in the *owned* part of an array.
930size_t Array::size() const {
931 // m_impl->dof > 1 for vector, staggered grid 2D fields, etc. In this case
932 // levels().size() == 0. For 3D fields, m_impl->dof == 1 (all 3D fields are
933 // scalar) and levels().size() corresponds to dof of the underlying PETSc
934 // DM object.
935
936 size_t
937 Mx = grid()->Mx(),
938 My = grid()->My(),
939 Mz = levels().empty() ? 1 : levels().size(),
940 dof = m_impl->dof;
941
942 return Mx * My * Mz * dof;
943}
944
945/*! Allocate a copy on processor zero and the scatter needed to move data.
946 */
947std::shared_ptr<petsc::Vec> Array::allocate_proc0_copy() const {
948 PetscErrorCode ierr;
949 Vec v_proc0 = NULL;
950 Vec result = NULL;
951
952 ierr = PetscObjectQuery((PetscObject)dm()->get(), "v_proc0", (PetscObject*)&v_proc0);
953 PISM_CHK(ierr, "PetscObjectQuery")
954 ;
955 if (v_proc0 == NULL) {
956
957 // natural_work will be destroyed at the end of scope, but it will
958 // only decrement the reference counter incremented by
959 // PetscObjectCompose below.
960 petsc::Vec natural_work;
961 // create a work vector with natural ordering:
962 ierr = DMDACreateNaturalVector(*dm(), natural_work.rawptr());
963 PISM_CHK(ierr, "DMDACreateNaturalVector");
964
965 // this increments the reference counter of natural_work
966 ierr = PetscObjectCompose((PetscObject)dm()->get(), "natural_work",
967 (PetscObject)((::Vec)natural_work));
968 PISM_CHK(ierr, "PetscObjectCompose");
969
970 // scatter_to_zero will be destroyed at the end of scope, but it
971 // will only decrement the reference counter incremented by
972 // PetscObjectCompose below.
973 petsc::VecScatter scatter_to_zero;
974
975 // initialize the scatter to processor 0 and create storage on processor 0
976 ierr = VecScatterCreateToZero(natural_work, scatter_to_zero.rawptr(),
977 &v_proc0);
978 PISM_CHK(ierr, "VecScatterCreateToZero");
979
980 // this increments the reference counter of scatter_to_zero
981 ierr = PetscObjectCompose((PetscObject)dm()->get(), "scatter_to_zero",
982 (PetscObject)((::VecScatter)scatter_to_zero));
983 PISM_CHK(ierr, "PetscObjectCompose");
984
985 // this increments the reference counter of v_proc0
986 ierr = PetscObjectCompose((PetscObject)dm()->get(), "v_proc0",
987 (PetscObject)v_proc0);
988 PISM_CHK(ierr, "PetscObjectCompose");
989
990 // We DO NOT call VecDestroy(v_proc0): the petsc::Vec wrapper will
991 // take care of this.
992 result = v_proc0;
993 } else {
994 // We DO NOT call VecDestroy(result): the petsc::Vec wrapper will
995 // take care of this.
996 ierr = VecDuplicate(v_proc0, &result);
997 PISM_CHK(ierr, "VecDuplicate");
998 }
999 return std::shared_ptr<petsc::Vec>(new petsc::Vec(result));
1000}
1001
1002void Array::put_on_proc0(petsc::Vec &parallel, petsc::Vec &onp0) const {
1003 PetscErrorCode ierr = 0;
1004 VecScatter scatter_to_zero = NULL;
1005 Vec natural_work = NULL;
1006
1007 ierr = PetscObjectQuery((PetscObject)dm()->get(), "scatter_to_zero",
1008 (PetscObject*)&scatter_to_zero);
1009 PISM_CHK(ierr, "PetscObjectQuery");
1010
1011 ierr = PetscObjectQuery((PetscObject)dm()->get(), "natural_work",
1012 (PetscObject*)&natural_work);
1013 PISM_CHK(ierr, "PetscObjectQuery");
1014
1015 if (natural_work == NULL || scatter_to_zero == NULL) {
1016 throw RuntimeError(PISM_ERROR_LOCATION, "call allocate_proc0_copy() before calling put_on_proc0");
1017 }
1018
1019 ierr = DMDAGlobalToNaturalBegin(*dm(), parallel, INSERT_VALUES, natural_work);
1020 PISM_CHK(ierr, "DMDAGlobalToNaturalBegin");
1021
1022 ierr = DMDAGlobalToNaturalEnd(*dm(), parallel, INSERT_VALUES, natural_work);
1023 PISM_CHK(ierr, "DMDAGlobalToNaturalEnd");
1024
1025 ierr = VecScatterBegin(scatter_to_zero, natural_work, onp0,
1026 INSERT_VALUES, SCATTER_FORWARD);
1027 PISM_CHK(ierr, "VecScatterBegin");
1028
1029 ierr = VecScatterEnd(scatter_to_zero, natural_work, onp0,
1030 INSERT_VALUES, SCATTER_FORWARD);
1031 PISM_CHK(ierr, "VecScatterEnd");
1032}
1033
1034
1035//! Puts a local array::Scalar on processor 0.
1037 if (m_impl->ghosted) {
1039 this->copy_to_vec(dm(), tmp);
1040 put_on_proc0(tmp, onp0);
1041 } else {
1042 put_on_proc0(vec(), onp0);
1043 }
1044}
1045
1046void Array::get_from_proc0(petsc::Vec &onp0, petsc::Vec &parallel) const {
1047 PetscErrorCode ierr;
1048
1049 VecScatter scatter_to_zero = NULL;
1050 Vec natural_work = NULL;
1051 ierr = PetscObjectQuery((PetscObject)dm()->get(), "scatter_to_zero",
1052 (PetscObject*)&scatter_to_zero);
1053 PISM_CHK(ierr, "PetscObjectQuery");
1054
1055 ierr = PetscObjectQuery((PetscObject)dm()->get(), "natural_work",
1056 (PetscObject*)&natural_work);
1057 PISM_CHK(ierr, "PetscObjectQuery");
1058
1059 if (natural_work == NULL || scatter_to_zero == NULL) {
1060 throw RuntimeError(PISM_ERROR_LOCATION, "call allocate_proc0_copy() before calling get_from_proc0");
1061 }
1062
1063 ierr = VecScatterBegin(scatter_to_zero, onp0, natural_work,
1064 INSERT_VALUES, SCATTER_REVERSE);
1065 PISM_CHK(ierr, "VecScatterBegin");
1066
1067 ierr = VecScatterEnd(scatter_to_zero, onp0, natural_work,
1068 INSERT_VALUES, SCATTER_REVERSE);
1069 PISM_CHK(ierr, "VecScatterEnd");
1070
1071 ierr = DMDANaturalToGlobalBegin(*dm(), natural_work, INSERT_VALUES, parallel);
1072 PISM_CHK(ierr, "DMDANaturalToGlobalBegin");
1073
1074 ierr = DMDANaturalToGlobalEnd(*dm(), natural_work, INSERT_VALUES, parallel);
1075 PISM_CHK(ierr, "DMDANaturalToGlobalEnd");
1076}
1077
1078//! Gets a local Array2 from processor 0.
1080 if (m_impl->ghosted) {
1082 get_from_proc0(onp0, tmp);
1083 global_to_local(*dm(), tmp, vec());
1084 } else {
1085 get_from_proc0(onp0, vec());
1086 }
1087 inc_state_counter(); // mark as modified
1088}
1089
1090/*!
1091 * Copy data to rank 0 and compute the checksum.
1092 *
1093 * Does not use ghosts. Results should be independent of the parallel domain
1094 * decomposition.
1095 */
1097
1098 auto v = allocate_proc0_copy();
1099 put_on_proc0(*v);
1100
1101 MPI_Comm com = m_impl->grid->ctx()->com();
1102
1103 int rank = 0;
1104 MPI_Comm_rank(com, &rank);
1105
1106 uint64_t result = 0;
1107 if (rank == 0) {
1108 petsc::VecArray array(*v);
1109
1110 PetscInt size = 0;
1111 PetscErrorCode ierr = VecGetLocalSize(*v, &size);
1112 PISM_CHK(ierr, "VecGetLocalSize");
1113
1114 result = pism::fletcher64((uint32_t*)array.get(), static_cast<size_t>(size) * 2);
1115 }
1116 MPI_Bcast(&result, 1, MPI_UINT64_T, 0, com);
1117
1118 return result;
1119}
1120
1121/*!
1122 * Compute a checksum of a vector.
1123 *
1124 * The result depends on the number of processors used.
1125 *
1126 * We assume that sizeof(double) == 2 * sizeof(uint32_t), i.e. double uses 64 bits.
1127 */
1128uint64_t Array::fletcher64() const {
1129 static_assert(sizeof(double) == 2 * sizeof(uint32_t),
1130 "Cannot compile Array::fletcher64() (sizeof(double) != 2 * sizeof(uint32_t))");
1131
1132 MPI_Status mpi_stat;
1133 const int checksum_tag = 42;
1134
1135 MPI_Comm com = m_impl->grid->ctx()->com();
1136
1137 int rank = 0;
1138 MPI_Comm_rank(com, &rank);
1139
1140 int comm_size = 0;
1141 MPI_Comm_size(com, &comm_size);
1142
1143 PetscInt local_size = 0;
1144 PetscErrorCode ierr = VecGetLocalSize(vec(), &local_size); PISM_CHK(ierr, "VecGetLocalSize");
1145 uint64_t sum = 0;
1146 {
1147 petsc::VecArray v(vec());
1148 // compute checksums for local patches on all ranks
1149 sum = pism::fletcher64((uint32_t*)v.get(), static_cast<size_t>(local_size) * 2);
1150 }
1151
1152 if (rank == 0) {
1153 std::vector<uint64_t> sums(comm_size);
1154
1155 // gather checksums of patches on rank 0
1156 sums[0] = sum;
1157 for (int r = 1; r < comm_size; ++r) {
1158 MPI_Recv(&sums[r], 1, MPI_UINT64_T, r, checksum_tag, com, &mpi_stat);
1159 }
1160
1161 // compute the checksum of checksums
1162 sum = pism::fletcher64((uint32_t*)sums.data(), static_cast<size_t>(comm_size) * 2);
1163 } else {
1164 MPI_Send(&sum, 1, MPI_UINT64_T, 0, checksum_tag, com);
1165 }
1166
1167 // broadcast to all ranks
1168 MPI_Bcast(&sum, 1, MPI_UINT64_T, 0, com);
1169
1170 return sum;
1171}
1172
1173std::string Array::checksum(bool serial) const {
1174 if (serial) {
1175 // unsigned long long is supposed to be at least 64 bit long
1176 return pism::printf("%016llx", (unsigned long long int)this->fletcher64_serial());
1177 }
1178 // unsigned long long is supposed to be at least 64 bit long
1179 return pism::printf("%016llx", (unsigned long long int)this->fletcher64());
1180}
1181
1182void Array::print_checksum(const char *prefix, bool serial) const {
1183 auto log = m_impl->grid->ctx()->log();
1184
1185 log->message(1, "%s%s: %s\n", prefix, m_impl->name.c_str(), checksum(serial).c_str());
1186}
1187
1188//! \brief View a 2D vector field using existing PETSc viewers.
1189void Array::view(std::vector<std::shared_ptr<petsc::Viewer> > viewers) const {
1190 PetscErrorCode ierr;
1191
1192 if (ndims() == 3) {
1194 "cannot 'view' a 3D field '%s'",
1195 get_name().c_str());
1196 }
1197
1198 // Get the dof=1, stencil_width=0 DMDA (components are always scalar
1199 // and we just need a global Vec):
1200 auto da2 = m_impl->grid->get_dm(1, 0);
1201
1203
1204 for (unsigned int i = 0; i < ndof(); ++i) {
1205 std::string
1206 long_name = m_impl->metadata[i]["long_name"],
1207 units = m_impl->metadata[i]["units"],
1208 output_units = m_impl->metadata[i]["output_units"],
1209 title = pism::printf("%s (%s)",
1210 long_name.c_str(),
1211 output_units.c_str());
1212
1213 PetscViewer v = *viewers[i].get();
1214
1215 PetscDraw draw = NULL;
1216 ierr = PetscViewerDrawGetDraw(v, 0, &draw);
1217 PISM_CHK(ierr, "PetscViewerDrawGetDraw");
1218
1219 ierr = PetscDrawSetTitle(draw, title.c_str());
1220 PISM_CHK(ierr, "PetscDrawSetTitle");
1221
1222 get_dof(da2, tmp, i);
1223
1224 convert_vec(tmp, m_impl->metadata[i].unit_system(),
1225 units, output_units);
1226
1227 double bounds[2] = {0.0, 0.0};
1228 ierr = VecMin(tmp, NULL, &bounds[0]); PISM_CHK(ierr, "VecMin");
1229 ierr = VecMax(tmp, NULL, &bounds[1]); PISM_CHK(ierr, "VecMax");
1230
1231 if (bounds[0] == bounds[1]) {
1232 bounds[0] = -1.0;
1233 bounds[1] = 1.0;
1234 }
1235
1236 ierr = PetscViewerDrawSetBounds(v, 1, bounds);
1237 PISM_CHK(ierr, "PetscViewerDrawSetBounds");
1238
1239 ierr = VecView(tmp, v);
1240 PISM_CHK(ierr, "VecView");
1241 }
1242}
1243
1244std::set<VariableMetadata> metadata(std::initializer_list<const Array *> vecs) {
1245 std::set<VariableMetadata> result;
1246 for (const auto *vec : vecs) {
1247 for (const auto &var : vec->all_metadata()) {
1248 result.insert(var);
1249 }
1250 }
1251 return result;
1252}
1253
1254} // end of namespace array
1255
1257 const std::string &spec1, const std::string &spec2) {
1258 units::Converter c(system, spec1, spec2);
1259
1260 // has to be a PetscInt because of the VecGetLocalSize() call
1261 PetscInt data_size = 0;
1262 PetscErrorCode ierr = VecGetLocalSize(v, &data_size);
1263 PISM_CHK(ierr, "VecGetLocalSize");
1264
1265 petsc::VecArray data(v);
1266 c.convert_doubles(data.get(), data_size);
1267}
1268
1269
1270} // end of namespace pism
VariableLookupData find_variable(const std::string &short_name, const std::string &std_name) const
Find a variable using its standard name and/or short name.
Definition File.cc:328
std::string name() const
Definition File.cc:274
High-level PISM I/O class.
Definition File.hh:57
void message(int threshold, const char format[],...) const __attribute__((format(printf
Print a message to the log.
Definition Logger.cc:50
A basic logging class.
Definition Logger.hh:40
void write_distributed_array(const std::string &variable_name, const double *input) const
Definition OutputFile.cc:56
void append_time(double time_seconds) const
Definition OutputFile.cc:41
void define_variable(const VariableMetadata &variable) const
Definition OutputFile.cc:31
void failed()
Indicates a failure of a parallel section.
virtual void begin_access() const =0
void add_context(const std::string &message)
Add a message providing some context. This way we can (sort of) get a stack trace even though C++ exc...
static RuntimeError formatted(const ErrorLocation &location, const char format[],...) __attribute__((format(printf
build a RuntimeError with a formatted message
std::string get_string(const std::string &name) const
Get a string attribute.
std::string get_name() const
T * rawptr()
Definition Wrapper.hh:34
T get() const
Definition Wrapper.hh:31
void add(const PetscAccessible &v)
Definition Array.cc:917
std::vector< const PetscAccessible * > m_vecs
Definition Array.hh:75
void read(const std::string &filename, unsigned int time)
Definition Array.cc:753
virtual void end_access() const
Checks if an Array is allocated and calls DAVecRestoreArray.
Definition Array.cc:619
virtual void regrid_impl(const File &file, io::Default default_value)
Gets an Array from a file file, interpolating onto the current grid.
Definition Array.cc:381
void write_impl(const OutputFile &file) const
Writes an Array to a NetCDF file.
Definition Array.cc:482
Array(std::shared_ptr< const Grid > grid, const std::string &name, Kind ghostedp, size_t dof, size_t stencil_width, const std::vector< double > &zlevels)
Definition Array.cc:66
unsigned int ndof() const
Returns the number of degrees of freedom per grid point.
Definition Array.cc:138
void set_interpolation_type(InterpolationType type)
Definition Array.cc:181
void set_name(const std::string &name)
Sets the variable name to name.
Definition Array.cc:345
void dump(const char filename[]) const
Dumps a variable to a file, overwriting this file's contents (for debugging).
Definition Array.cc:554
void write(const OutputFile &file) const
Definition Array.cc:859
virtual void begin_access() const
Checks if an Array is allocated and calls DAVecGetArray.
Definition Array.cc:598
void shift(double alpha)
Result: v[j] <- v[j] + alpha for all j. Calls VecShift.
Definition Array.cc:219
const std::vector< double > & levels() const
Definition Array.cc:142
size_t size() const
Return the total number of elements in the owned part of an array.
Definition Array.cc:930
void get_from_proc0(petsc::Vec &onp0)
Gets a local Array2 from processor 0.
Definition Array.cc:1079
void copy_to_vec(std::shared_ptr< petsc::DM > destination_da, petsc::Vec &destination) const
Copies v to a global vector 'destination'. Ghost points are discarded.
Definition Array.cc:241
const std::string & get_name() const
Get the name of an Array object.
Definition Array.cc:357
virtual ~Array()
Definition Array.cc:107
petsc::Vec & vec() const
Definition Array.cc:313
uint64_t fletcher64_serial() const
Definition Array.cc:1096
void get_dof(std::shared_ptr< petsc::DM > da_result, petsc::Vec &result, unsigned int start, unsigned int count=1) const
Definition Array.cc:251
void scale(double alpha)
Result: v <- v * alpha. Calls VecScale.
Definition Array.cc:227
void print_checksum(const char *prefix="", bool serial=false) const
Definition Array.cc:1182
int state_counter() const
Get the object state counter.
Definition Array.cc:130
std::vector< int > shape() const
Definition Array.cc:162
std::string checksum(bool serial) const
Definition Array.cc:1173
void view(std::vector< std::shared_ptr< petsc::Viewer > > viewers) const
View a 2D vector field using existing PETSc viewers.
Definition Array.cc:1189
std::shared_ptr< const Grid > grid() const
Definition Array.cc:134
void set(double c)
Result: v[j] <- c for all j.
Definition Array.cc:659
void set_dof(std::shared_ptr< petsc::DM > da_source, petsc::Vec &source, unsigned int start, unsigned int count=1)
Definition Array.cc:276
void inc_state_counter()
Increment the object state counter.
Definition Array.cc:153
std::shared_ptr< petsc::DM > dm() const
Definition Array.cc:327
unsigned int ndims() const
Returns the number of spatial dimensions.
Definition Array.cc:158
void regrid(const std::string &filename, io::Default default_value)
Definition Array.cc:758
std::vector< VariableMetadata > all_metadata() const
Definition Array.cc:477
std::shared_ptr< petsc::Vec > allocate_proc0_copy() const
Definition Array.cc:947
uint64_t fletcher64() const
Definition Array.cc:1128
void put_on_proc0(petsc::Vec &onp0) const
Puts a local array::Scalar on processor 0.
Definition Array.cc:1036
void set_begin_access_use_dof(bool flag)
Definition Array.cc:177
void add(double alpha, const Array &x)
Result: v <- v + alpha * x. Calls VecAXPY.
Definition Array.cc:209
std::vector< double > norm(int n) const
Computes the norm of all the components of an Array.
Definition Array.cc:699
void update_ghosts()
Updates ghost points.
Definition Array.cc:645
VariableMetadata & metadata(unsigned int N=0)
Returns a reference to the VariableMetadata object containing metadata for the component N.
Definition Array.cc:467
unsigned int stencil_width() const
Get the stencil width of the current Array. Returns 0 if ghosts are not available.
Definition Array.cc:305
void checkCompatibility(const char *function, const Array &other) const
Checks if two Arrays have compatible sizes, dimensions and numbers of degrees of freedom.
Definition Array.cc:575
void read_impl(const File &file, unsigned int time)
Reads appropriate NetCDF variable(s) into an Array.
Definition Array.cc:419
void check_array_indices(int i, int j, unsigned int k) const
Check array indices and warn if they are out of range.
Definition Array.cc:666
Abstract class for reading, writing, allocating, and accessing a DA-based PETSc Vec (2D and 3D fields...
Definition Array.hh:209
bool exists() const
Definition IO_Flags.hh:109
double * get()
Definition Vec.cc:55
Wrapper around VecGetArray and VecRestoreArray.
Definition Vec.hh:47
void convert_doubles(double *data, size_t length) const
Definition Units.cc:255
std::shared_ptr< System > Ptr
Definition Units.hh:47
#define PISM_CHK(errcode, name)
#define PISM_ERROR_LOCATION
#define n
Definition exactTestM.c:37
std::set< VariableMetadata > metadata(std::initializer_list< const Array * > vecs)
Definition Array.cc:1244
void global_to_local(petsc::DM &dm, Vec source, Vec destination)
Definition Array.cc:56
double max(const array::Scalar &input)
Finds maximum over all the values in an array::Scalar object. Ignores ghosts.
Definition Scalar.cc:165
void set_default_value_or_stop(const VariableMetadata &variable, io::Default default_value, const Logger &log, Vec output)
Definition Array.cc:361
double sum(const array::Scalar &input)
Sums up all the values in an array::Scalar object. Ignores ghosts.
Definition Scalar.cc:150
Kind
What "kind" of a vector to create: with or without ghosts.
Definition Array.hh:63
@ WITH_GHOSTS
Definition Array.hh:63
static NormType int_to_normtype(int input)
Definition Array.cc:195
double min(const array::Scalar &input)
Finds minimum over all the values in an array::Scalar object. Ignores ghosts.
Definition Scalar.cc:193
static void check_range(petsc::Vec &v, const VariableMetadata &metadata, const std::string &filename, const Logger &log, bool report_range)
Definition Array.cc:763
void read_spatial_variable(const VariableMetadata &variable, const Grid &grid, const File &file, unsigned int time, double *output)
Read a variable from a file into an array output.
@ PISM_GUESS
Definition IO_Flags.hh:57
@ PISM_READONLY
open an existing file for reading only
Definition IO_Flags.hh:69
double get_time(MPI_Comm comm)
void GlobalMax(MPI_Comm comm, double *local, double *result, int count)
std::string printf(const char *format,...)
uint64_t fletcher64(const uint32_t *data, size_t length)
static const double k
Definition exactTestP.cc:42
static double end_time(const Config &config, double time_start, const std::string &calendar, const units::Unit &time_units)
Definition Time.cc:395
static void report_range(const std::vector< double > &data, const VariableMetadata &metadata, const Logger &log)
Report the range of a time-series stored in data.
std::string timestamp(MPI_Comm com)
Creates a time-stamp used for the history NetCDF attribute.
void handle_fatal_errors(MPI_Comm com)
static double start_time(const Config &config, const File *file, const std::string &reference_date, const std::string &calendar, const units::Unit &time_units)
Definition Time.cc:349
void GlobalSum(MPI_Comm comm, double *local, double *result, int count)
void convert_vec(petsc::Vec &v, units::System::Ptr system, const std::string &spec1, const std::string &spec2)
Definition Array.cc:1256
bool begin_access_use_dof
If true, use DMDAVecGetArrayDOF() in begin_access()
Definition Array_impl.hh:94
int state_counter
Internal array::Array "revision number".
bool ghosted
true if this Array is ghosted
Definition Array_impl.hh:86
std::vector< VariableMetadata > metadata
Metadata (NetCDF variable attributes)
Definition Array_impl.hh:74
std::shared_ptr< const Grid > grid
The computational grid.
Definition Array_impl.hh:77
InterpolationType interpolation_type
bool report_range
If true, report range when regridding.
Definition Array_impl.hh:62
gsl_interp_accel * bsearch_accel
unsigned int da_stencil_width
stencil width supported by the DA
Definition Array_impl.hh:83
std::vector< double > zlevels
Vertical levels (for 3D fields)
std::shared_ptr< petsc::DM > da
Definition Array_impl.hh:91
unsigned int dof
number of "degrees of freedom" per grid point
Definition Array_impl.hh:80
int count
Definition test_cube.c:16