PISM, A Parallel Ice Sheet Model 2.3.0-79cae578d committed by Constantine Khrulev on 2026-03-22
Loading...
Searching...
No Matches
YacOutputWriter.cc
Go to the documentation of this file.
1/* Copyright (C) 2025, 2026 PISM Authors
2 *
3 * This file is part of PISM.
4 *
5 * PISM is free software; you can redistribute it and/or modify it under the
6 * terms of the GNU General Public License as published by the Free Software
7 * Foundation; either version 3 of the License, or (at your option) any later
8 * version.
9 *
10 * PISM is distributed in the hope that it will be useful, but WITHOUT ANY
11 * WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
12 * FOR A PARTICULAR PURPOSE. See the GNU General Public License for more
13 * details.
14 *
15 * You should have received a copy of the GNU General Public License
16 * along with PISM; if not, write to the Free Software
17 * Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA
18 */
19
20#include <cstddef> // size_t
21#include <memory>
22#include <cmath>
23#include <cstring>
24#include <mpi.h>
25#include <vector>
26
27#include "pism/util/Config.hh"
28#include "pism/util/GridInfo.hh"
29#include "pism/util/Grid.hh"
30#include "pism/util/VariableMetadata.hh"
31#include "pism/util/error_handling.hh"
32#include "pism/util/io/File.hh"
33#include "pism/util/io/IO_Flags.hh"
34#include "pism/util/io/YacOutputWriter.hh"
35#include "pism/external/nlohmann/json.hpp"
36
37extern "C" {
38#include "yac.h"
39}
40
41namespace pism {
42
43namespace details {
44/*!
45 * Store variable attributes from `attributes` in a JSON object `json`.
46 */
47static void to_json(const VariableAttributes &attributes, nlohmann::json &output) {
48 for (const auto &attribute : attributes.strings) {
49 output[attribute.first] = attribute.second;
50 }
51
52 for (const auto &attribute : attributes.numbers) {
53 output[attribute.first] = attribute.second;
54 }
55}
56
57/*!
58 * Convert a PISM data type to a string that describes the same type as a NumPy dtype
59 * object (in Python).
60 */
61static std::string to_python_type(pism::io::Type input) {
62 static const std::map<pism::io::Type, std::string> type_map = {
63 { io::PISM_BYTE, "i1" }, { io::PISM_CHAR, "S1" }, { io::PISM_SHORT, "i2" },
64 { io::PISM_INT, "i4" }, { io::PISM_FLOAT, "f4" }, { io::PISM_DOUBLE, "f8" }
65 };
66
67 auto it = type_map.find(input);
68 return (it != type_map.end()) ? it->second : "None"; // "None" for NC_NAT
69}
70
/*!
 * Compute global (row-major) indices for the points of a local patch.
 *
 * The global grid has `x_global_size` columns; the patch starts at column
 * `x_start`, row `y_start` and spans `x_size` columns by `y_size` rows.
 */
static std::vector<int> patch_global_indices(unsigned int x_global_size, unsigned int x_start,
                                             unsigned int x_size, unsigned int y_start,
                                             unsigned int y_size) {
  std::vector<int> result((size_t)x_size * y_size);
  size_t k = 0;
  for (unsigned int row = 0; row < y_size; ++row) {
    // global index of the first point in this row of the patch
    unsigned int row_offset = (y_start + row) * x_global_size + x_start;
    for (unsigned int col = 0; col < x_size; ++col) {
      result[k++] = (int)(row_offset + col);
    }
  }
  return result;
}
87
88/*!
89 * Return the string identifying the grid used by a variable.
90 *
91 * Uses names of the first two dimensions. (The third dimension, if present, will
92 * correspond to a vertical or some other coordinate.)
93 */
94static std::string grid_name(const VariableMetadata &variable) {
95 const auto &dims = variable.dimensions();
96 return dims[0].get_name() + "-" + dims[1].get_name();
97}
98
/*!
 * Free buffers used to cache numeric data sent using MPI_Isend().
 *
 * Each element was allocated with `new double[...]` (see write_array_impl()),
 * so it must be released with the array form `delete[]`; plain `delete` on an
 * array allocation is undefined behavior.
 */
static void free_array_buffers(std::vector<double *> &buffers) {
  for (auto *ptr : buffers) {
    delete[] ptr; // matches `new double[data_size]`
  }
  buffers.clear();
}
108
 109/*!
 110 * Compute coordinates of grid points owned by the current MPI process.
 111 */
// NOTE(review): original line 112 — the first line of this function's signature,
// "void compute_point_coordinates(const grid::DistributedGridInfo &grid," per the
// declaration index at the bottom of this listing — is missing here.
 113 const std::string & proj_string, std::vector<double> &longitudes,
 114 std::vector<double> &latitudes) {
 115 int patch_size = (int)(grid.xm * grid.ym);
 116
 117 longitudes.resize(patch_size);
 118 latitudes.resize(patch_size);
 119
 120 if (proj_string.empty()) {
 121 // Generate "fake" longitudes and latitudes for the local patch of the grid. Here
 122 // longitudes and latitudes range from 0 to 1 (inclusive) for the "global" grid (a local
 123 // patch covers a part of this).
 124 //
 125 // This is sufficient for moving data from PISM to the output server and does not
 126 // require projection info.
 127 const auto &x = grid.x;
 128 const auto &y = grid.y;
 129
 130 double x_min = x.front();
 131 double y_min = y.front();
// NOTE(review): x_span/y_span are zero for a one-point-wide grid, which would
// divide by zero below — presumably grids always have at least two points per
// direction; verify.
 132 double x_span = x.back() - x_min;
 133 double y_span = y.back() - y_min;
 134 int it = 0;
 135 for (auto p : GridPoints(grid)) {
 136 const int i = p.i(), j = p.j();
 137 longitudes[it] = (x[i] - x_min) / x_span;
 138 latitudes[it] = (y[j] - y_min) / y_span;
 139 it++;
 140 }
 141 } else {
 142 // FIXME: make it possible to use projection info to use real lon,lat coordinates of
 143 // grid points. This will be necessary for "on the fly" post-processing in the output
 144 // server.
// NOTE(review): original line 145 is missing from this listing — presumably
// "throw RuntimeError::formatted(PISM_ERROR_LOCATION,"; only the message
// argument on line 146 survives.
 146 "output writer: non-trivial projections are not implemented yet");
 147 }
 148}
149
150} // namespace details
151
 152// Even if we are using YAC, certain forms of interaction with the server cannot
 153// be made using only YAC functionalities. Sending actions, non-gridded data
 154// and metadata (after definitions) are examples of such. In order to be able
 155// to send all the information to the server we need to define an intercommunicator,
 156// which then allows direct MPI communication between the client and the server.
// NOTE(review): original line 157 — the signature of this member function (it
// assigns m_intercomm; its exact name is not visible in this listing) — is
// missing here.
 158 // At this point YAC has been initialized in the pism::initialize() and on the server
 159 // side. Both client and server components have been defined.
 160
 161 // We get the local component communicator and a global communicator which
 162 // contains all client and server processes
 163 MPI_Comm global_comm = MPI_COMM_NULL;
 164 {
 165 const int nbr_comps = 2;
 166 const char *comp_names[nbr_comps] = { "pism", "pism_output" };
 167 yac_cget_comps_comm(comp_names, nbr_comps, &global_comm);
 168 }
 169
 170 int global_size = 0;
 171 MPI_Comm_size(global_comm, &global_size);
 172 std::vector<int> component_leaders_ranks(global_size);
 173
 174 // We retrieve the process group information from both local and global communicators
 175 MPI_Group local_group = MPI_GROUP_NULL, global_group = MPI_GROUP_NULL;
 176 MPI_Comm_group(comm(), &local_group);
 177 MPI_Comm_group(global_comm, &global_group);
 178
 179 // For the creation of the intercommunicator we need to set leaders on both groups.
 180 // We define process 0 of each component to be the leader of its local group.
 181 // We then find the corresponding rank of each leader in the global group and
 182 // exchange this information between the processes.
 183 // The intercomm creation is then done using this information.
 184 const int local_leader_rank[] = {0};
 185 int local_leader_global_rank[] = {-1};
 186 MPI_Group_translate_ranks(local_group, 1, local_leader_rank,
 187 global_group, local_leader_global_rank);
 188 MPI_Allgather(local_leader_global_rank, 1, MPI_INT,
 189 component_leaders_ranks.data(), 1, MPI_INT, global_comm);
// NOTE(review): using .back() assumes the remote (server) leader has the
// highest rank in global_comm, i.e. server processes follow all client
// processes — confirm against how yac_cget_comps_comm orders components.
 190 int remote_leader = component_leaders_ranks.back();
 191
 192 int tag = 0;
 193 MPI_Intercomm_create(comm(), local_leader_rank[0], global_comm, remote_leader, tag,
 194 &m_intercomm);
 195
 196 MPI_Group_free(&local_group);
 197 MPI_Group_free(&global_group);
 198 MPI_Comm_free(&global_comm);
 199}
200
201int YacOutputWriter::tag(const std::string &variable_name, TagTreatment flag) {
202 auto i = m_variable_tags.find(variable_name);
203 if (i != m_variable_tags.end()) {
204 return i->second;
205 }
206
207 if (flag == CREATE_NEW_TAG) {
208 int new_tag = (int)m_variable_tags.size();
209 m_variable_tags[variable_name] = new_tag;
210 return new_tag;
211 }
212
213 throw RuntimeError::formatted(PISM_ERROR_LOCATION, "Bug: no tag for variable '%s'",
214 variable_name.c_str());
215}
216
 217// Initializes the YAC grid and sends the geometrical information to
 218// the server so that it can also initialize its own grid
// NOTE(review): original line 219 — the signature, per the member index
// "void define_yac_grid(const VariableMetadata &variable)" — is missing from
// this listing.
 220
 221 const auto grid_name = details::grid_name(variable);
 222
 223 if (m_point_set_id.find(grid_name) != m_point_set_id.end()) {
 224 // this grid was defined already
 225 return;
 226 }
 227
 228 // Distributed grid containing the domain decomposition information
 229 const auto &grid = *variable.grid_info();
 230
 231 {
 232 nlohmann::json info;
 233 info["grid_name"] = grid_name;
// NOTE(review): original line 235 is missing here — presumably
// "send_action(DEFINE_YAC_GRID, info);" (DEFINE_YAC_GRID appears in the action
// enum listed below).
 236 }
 237
 238 // Sends the global domain sizes to the server
 239 if (m_leader) {
 240 int grid_size[2] = {(int)grid.Mx, (int)grid.My};
 241 m_mpi_requests.emplace_back();
 242 MPI_Isend((void *) &grid_size, 2, MPI_INT, 0, 0, m_intercomm, &m_mpi_requests.back());
 243 }
 244
// Empty proj string => "fake" [0,1]x[0,1] coordinates; sufficient for routing
// data to the server (see compute_point_coordinates()).
 245 std::vector<double> latitudes;
 246 std::vector<double> longitudes;
 247 details::compute_point_coordinates(grid, "", longitudes, latitudes);
 248
 249 int local_patch_size = (int)latitudes.size();
 250 // Gathers on the server the size of the local patch from each process
 251 MPI_Gather(&local_patch_size, 1, MPI_INT, NULL, 1, MPI_INT, 0, m_intercomm);
 252
 253 // Translate local point indices to global point indices
 254 auto patch_global_indices =
 255 details::patch_global_indices(grid.Mx, grid.xs, grid.xm, grid.ys, grid.ym);
 256
 257 // Sends the global indices of local points to the server, followed by local latitudes and longitudes
 258 MPI_Gatherv(patch_global_indices.data(), local_patch_size, MPI_INT, NULL, NULL, NULL, MPI_INT, 0,
// NOTE(review): original lines 259, 261 and 263 are missing from this listing —
// each presumably held the trailing "m_intercomm);" argument closing the
// preceding MPI_Gatherv call.
 260 MPI_Gatherv(latitudes.data(), local_patch_size, MPI_DOUBLE, NULL, NULL, NULL, MPI_DOUBLE, 0,
 262 MPI_Gatherv(longitudes.data(), local_patch_size, MPI_DOUBLE, NULL, NULL, NULL, MPI_DOUBLE, 0,
 264
 265 // Defines the YAC grid and points using the local points
 266 int cyclic_dims[] = {0, 0};
 267 int nbr_vertices[] = {(int)grid.xm, (int)grid.ym};
 268 int grid_id = -1;
 269 int point_set_id = -1;
 270 yac_cdef_grid_curve2d(grid_name.c_str(), nbr_vertices, cyclic_dims, longitudes.data(),
 271 latitudes.data(), &grid_id);
 272 yac_cdef_points_unstruct(grid_id, local_patch_size, YAC_LOCATION_CORNER, longitudes.data(),
 273 latitudes.data(), &point_set_id);
 274
 275 m_point_set_id[grid_name] = point_set_id;
 276}
277
 278// Subroutine to define a YAC field
// NOTE(review): original line 279 — the signature, per the member index
// "void define_yac_field(const VariableMetadata &variable)" — is missing from
// this listing.
 280
 281 const auto &variable_name = variable.get_name();
 282
 283 // If the field has been defined already, return
 284 if (m_field_ids.find(variable_name) != m_field_ids.end()) {
 285 return;
 286 }
 287
// A 2D variable has no vertical levels; YAC still requires a collection size
// of at least 1.
 288 int collection_size = std::max((int)variable.levels().size(), 1);
 289
 290 // define the field
 291 {
 292 int point_set_id = m_point_set_id[details::grid_name(variable)];
 293
 294 int field_id = -1;
 295 yac_cdef_field(variable_name.c_str(), 1, &point_set_id, 1, collection_size, "PT1M",
 296 YAC_TIME_UNIT_ISO_FORMAT, &field_id);
 297
 298 m_field_ids[variable_name] = field_id;
 299 }
 300
 301 // tell the output server to define the field
 302 {
 303 nlohmann::json info;
 304
 305 info["variable_name"] = variable_name;
 306 info["timestep"] = "PT1M";
 307 info["collection_size"] = collection_size;
 308 info["grid_name"] = details::grid_name(variable);
 309
// NOTE(review): original line 310 is missing from this listing — presumably
// "send_action(DEFINE_YAC_FIELD, info);" (DEFINE_YAC_FIELD appears in the
// action enum listed below).
 311 }
 312}
313
 314// This subroutine ends the YAC definitions phase.
 315// No components, grids or fields can be defined after this.
// NOTE(review): original lines 316-321 are missing from this listing — they
// held the body of the end-of-definitions routine announced by the comment
// above.
 322
// NOTE(review): original line 323 — the first line of send_action()'s
// signature, "void YacOutputWriter::send_action(int action_id," per the member
// index — is missing here.
 324 const nlohmann::json &metadata) {
 325 // Only the leader process needs to send actions to the server
 326 if (not m_leader) {
 327 return;
 328 }
 329
 330 nlohmann::json message{};
 331 message["action"] = action_id;
 332 message["info"] = metadata;
 333
// Buffer the serialized message: MPI_Isend() requires the bytes to stay alive
// until the request completes; m_text_buffers keeps them until sync/destruction.
// NOTE(review): a later push_back may reallocate m_text_buffers; a short (SSO)
// string would then move while MPI_Isend() is still reading it. Long JSON
// messages keep their heap buffers through moves, but consider std::deque.
 334 m_text_buffers.push_back(message.dump());
 335 const auto &message_string = m_text_buffers.back();
 336
 337 // Send the metadata string to the output server:
 338 m_mpi_requests.emplace_back();
 339 MPI_Isend((void *)message_string.data(), (int)message_string.length(), MPI_CHAR, 0, 0,
 340 m_intercomm, &m_mpi_requests.back());
 341}
342
 343YacOutputWriter::YacOutputWriter(MPI_Comm comm, const Config &config)
 344 : OutputWriter(comm, config) {
 345
 346 set_is_async(true);
 347 {
 348 int rank = -1;
 349 MPI_Comm_rank(comm, &rank);
// Rank 0 of the component communicator is the "leader" responsible for
// sending actions and non-gridded data to the server (see m_leader).
 350 m_leader = (rank == 0);
 351 }
 352
// NOTE(review): original line 353 is missing from this listing — presumably the
// call that creates m_intercomm (which the destructor frees via MPI_Comm_free()).
 354}
355
// NOTE(review): original line 356 — the destructor's signature,
// "YacOutputWriter::~YacOutputWriter() {" — is missing from this listing.
 357 send_action(FINISH, {});
 358
 359 try {
 360 waitall();
 361 } catch (RuntimeError &e) {
 362 // ignore failures: we should not let exceptions escape a destructor
 363 }
 364
// NOTE(review): original line 365 is missing — presumably
// "details::free_array_buffers(m_buffers);" (write_array_impl() says its
// buffers are freed in the destructor).
 366
// NOTE(review): if m_field_buffer is allocated with new double[...] (its
// allocation is not visible in this listing), this must be delete[] — verify.
 367 delete m_field_buffer;
 368
 369 MPI_Comm_free(&m_intercomm);
 370
 371 yac_cfinalize();
 372}
373
 374/*!
 375 * Define all the grids and send grid information to the other side.
 376 */
 377void YacOutputWriter::initialize_impl(const std::set<VariableMetadata> &array_variables) {
 378
 379 for (const auto &variable : array_variables) {
// Skip variables without grid information (non-gridded data).
 380 if (variable.grid_info() == nullptr) {
 381 continue;
 382 }
 383
// NOTE(review): grid_name appears unused in the visible part of this loop.
 384 auto grid_name = details::grid_name(variable);
 385
 386 const auto &grid = *variable.grid_info();
 387
 388 int collection_size = std::max((int)variable.levels().size(), 1);
// Track the largest local field size: m_field_buffer must hold any variable's
// local patch (xm * ym * collection_size doubles).
 389 int array_size = (int)(grid.xm * grid.ym * collection_size);
 390 m_field_buffer_size = std::max(array_size, m_field_buffer_size);
 391
 392 // define the grid (if necessary)
 393 define_yac_grid(variable);
 394
 395 // define the YAC field
 396 define_yac_field(variable);
 397 }
 398
// NOTE(review): original line 399 is missing from this listing — presumably the
// allocation of m_field_buffer and/or the call ending the YAC definition phase.
 400}
401
 402void YacOutputWriter::define_variable_impl(const std::string &file_name,
 403 const std::string &variable_name,
 404 const std::vector<std::string> &dims, io::Type type,
 405 const VariableAttributes &attributes) {
 406
 407 // If this variable was already defined for this file, return
 408 if (m_defined_variable[file_name][variable_name]) {
 409 return;
 410 }
 411
 412 {
 413 nlohmann::json info, nc_attributes;
 414 details::to_json(attributes, nc_attributes);
 415 info["attributes"] = nc_attributes;
 416 info["dimensions"] = dims;
// dtype is sent as a NumPy dtype string so the (Python) server can interpret it
 417 info["dtype"] = details::to_python_type(type);
 418 info["file_name"] = file_name;
 419 info["variable_name"] = variable_name;
// Allocate a fresh MPI tag for this variable; write_array_impl() and
// write_text_impl() later send the data using the same tag.
 420 info["tag"] = tag(variable_name, CREATE_NEW_TAG);
 421
// NOTE(review): original line 422 is missing from this listing — presumably
// "send_action(DEFINE_VARIABLE, info);" (DEFINE_VARIABLE appears in the action
// enum listed below).
 423 }
 424
 425 // Save the variable as already defined for this file
 426 m_defined_variable[file_name][variable_name] = true;
 427}
428
 429void YacOutputWriter::append_time_impl(const std::string &file_name, double time_seconds) {
 430 // Note: these values are updated *without* communication with the output server.
 431 {
 432 m_time_length[file_name] += 1;
 433 m_last_time[file_name] = time_seconds;
 434 }
 435
 436 {
 437 nlohmann::json info;
 438 info["file_name"] = file_name;
 439 info["time"] = time_seconds;
// NOTE(review): original line 440 is missing from this listing — presumably the
// send_action(...) call telling the server to append this time record.
 441 }
 442}
443
 444void YacOutputWriter::append_history_impl(const std::string &file_name, const std::string &text) {
 445 nlohmann::json info;
 446 info["file_name"] = file_name;
 447 info["history"] = text;
// NOTE(review): original line 448 is missing from this listing — presumably the
// send_action(...) call forwarding this history update to the server.
 449}
450
451void YacOutputWriter::append_impl(const std::string &file_name) {
452
453 nlohmann::json info;
454 info["file_name"] = file_name;
455 send_action(OPEN_FILE, info);
456
457 // get file information from the output server
458 //
459 // FIXME: The Recv + Bcast pair can be replaced with a single Bcast using m_intercomm.
460 if (m_leader) {
461 MPI_Status status;
462 int time_length = -1;
463 MPI_Recv(&time_length, 1, MPI_INT, 0, 0, m_intercomm, &status);
464 double last_time = -1;
465 MPI_Recv(&last_time, 1, MPI_DOUBLE, 0, 0, m_intercomm, &status);
466
467 m_time_length[file_name] = time_length;
468 m_last_time[file_name] = last_time;
469 }
470
471 // scatter to other ranks in `comm()`:
472 MPI_Bcast(&m_time_length[file_name], 1, MPI_INT, 0, comm());
473 MPI_Bcast(&m_last_time[file_name], 1, MPI_DOUBLE, 0, comm());
474}
475
// NOTE(review): original line 476 — this function's signature,
// "void YacOutputWriter::waitall()" per the member index — is missing here.
// Blocks until every outstanding MPI_Isend() in m_mpi_requests has completed.
 477 int error_code =
 478 MPI_Waitall((int)m_mpi_requests.size(), m_mpi_requests.data(), MPI_STATUSES_IGNORE);
 479 if (error_code != MPI_SUCCESS) {
 480 throw RuntimeError::formatted(PISM_ERROR_LOCATION, "Fatal error in MPI_Waitall()");
 481 }
 482}
483
 484void YacOutputWriter::sync_impl(const std::string & /*file_name*/) {
 485 send_action(SYNC, {});
 486
// Wait for all pending MPI_Isend() operations so the buffers below can be
// released safely.
 487 waitall();
 488
 489 m_mpi_requests.clear();
 490 m_text_buffers.clear();
// NOTE(review): original line 491 is missing from this listing — presumably
// "details::free_array_buffers(m_buffers);" (write_array_impl() says its
// buffers are freed in sync_impl()).
 492
 493 // FIXME: consider adding MPI_Barrier(m_intercomm)
 494}
495
496void YacOutputWriter::close_impl(const std::string &file_name) {
497 nlohmann::json info;
498 info["file_name"] = file_name;
499 send_action(CLOSE_FILE, info);
500}
501
 502void YacOutputWriter::define_dimension_impl(const std::string &file_name,
 503 const std::string &name, unsigned int length) {
 504
 505 // If this dimension has been defined already for this file, return
 506 if (m_defined_dimension[file_name][name]) {
 507 return;
 508 }
 509
 510 // save the length of the time dimension
 511 if (name == time_name()) {
 512 m_time_length[file_name] = (int)length;
 513 }
 514
 515 // Gathers the dimension metadata and sends it to the server
 516 {
 517 nlohmann::json info;
 518 info["file_name"] = file_name;
 519 info["name"] = name;
 520 info["length"] = length;
// NOTE(review): original line 521 is missing from this listing — presumably
// "send_action(DEFINE_DIMENSION, info);" (DEFINE_DIMENSION appears in the
// action enum listed below).
 522 }
 523
 524 m_defined_dimension[file_name][name] = true;
 525}
526
// NOTE(review): original line 527 — the start of this function's signature,
// "void YacOutputWriter::set_global_attributes_impl(" per the member index —
// is missing here.
 528 const std::string &file_name, const std::map<std::string, std::string> &strings,
 529 const std::map<std::string, std::vector<double> > &numbers) {
 530
 531 // Gather the global_attributes into the JSON object and send it to the server.
 532 nlohmann::json attributes_json;
 533 for (const auto &attribute : strings) {
 534 attributes_json[attribute.first] = attribute.second;
 535 }
 536
 537 for (const auto &attribute : numbers) {
 538 attributes_json[attribute.first] = attribute.second;
 539 }
 540
 541 nlohmann::json file_attributes_json;
 542 file_attributes_json["file_name"] = file_name;
 543 file_attributes_json["attributes"] = attributes_json;
 544 send_action(SET_FILE_ATTRIBUTES, file_attributes_json);
 545}
546
547unsigned int YacOutputWriter::time_dimension_length_impl(const std::string &file_name) {
548 return m_time_length[file_name];
549}
550
551double YacOutputWriter::last_time_value_impl(const std::string &file_name) {
552 return m_last_time[file_name];
553}
554
 555void YacOutputWriter::write_array_impl(const std::string &file_name,
 556 const std::string &variable_name,
 557 const std::vector<unsigned int> &start,
 558 const std::vector<unsigned int> &count, const double *data) {
 559
 560 // Gathers the variable name into the json object and sends it
 561 // to the server for identification of which variable to receive
 562 nlohmann::json info;
 563 info["file_name"] = file_name;
 564 info["variable_name"] = variable_name;
 565 info["start"] = start;
 566 info["count"] = count;
 567 info["tag"] = tag(variable_name);
// NOTE(review): original line 568 is missing from this listing — presumably the
// send_action(...) call announcing this non-gridded variable to the server.
 569
 570 // Non-gridded variables are sent by the leader process
 571 if (m_leader) {
// data_size is the product of the `count` entries, i.e. the number of doubles.
 572 int data_size = 1;
 573 for (const auto &c : count) {
 574 data_size *= (int)c;
 575 }
 576 // Buffers the argument array so that the asynchronous operation can finish after its
 577 // lifetime. These arrays are deleted using details::free_array_buffers() in the
 578 // destructor and sync_impl().
// NOTE(review): allocated with new[]; details::free_array_buffers() must use
// delete[] to match.
 579 double *buffer = new double[data_size];
 580 m_buffers.push_back(buffer);
 581 memcpy(buffer, data, data_size * sizeof(double));
 582 m_mpi_requests.emplace_back();
 583 MPI_Isend((void *)(buffer), data_size, MPI_DOUBLE, 0, tag(variable_name),
 584 m_intercomm, &m_mpi_requests.back());
 585 }
 586}
587
 588void YacOutputWriter::write_text_impl(const std::string &file_name,
 589 const std::string &variable_name,
 590 const std::vector<unsigned int> &start,
 591 const std::vector<unsigned int> &count,
 592 const std::string &input) {
 593
 594 // info["text"] = true indicates that this is a text variable and data should be
 595 // received using the MPI_CHAR type instead of MPI_DOUBLE
 596 nlohmann::json info;
 597 info["file_name"] = file_name;
 598 info["variable_name"] = variable_name;
 599 info["start"] = start;
 600 info["count"] = count;
 601 info["tag"] = tag(variable_name);
 602 info["text"] = true;
// NOTE(review): original line 603 is missing from this listing — presumably the
// send_action(...) call announcing this text variable to the server.
 604
 605 // Text variables are sent by the leader process
 606 if (m_leader) {
// data_size is the product of the `count` entries, i.e. the number of chars.
 607 int data_size = 1;
 608 for (const auto &c : count) {
 609 data_size *= (int)c;
 610 }
 611 // Text fields are buffered so that the asynchronous send can finish after the
 612 // arguments lifetime. Since it is buffered inside of a vector, the de-allocation
 613 // happens automatically at the destructor
// NOTE(review): a later push_back may reallocate m_text_buffers; a short (SSO)
// string would then move while MPI_Isend() is still reading its bytes —
// consider a std::deque, or verify message sizes always exceed the SSO limit.
 614 m_text_buffers.push_back(input);
 615 m_mpi_requests.emplace_back();
 616 MPI_Isend((void *)(m_text_buffers.back().data()), data_size, MPI_CHAR, 0,
 617 tag(variable_name), m_intercomm, &m_mpi_requests.back());
 618 }
 619}
620
 621void YacOutputWriter::write_distributed_array_impl(const std::string &file_name,
 622 const std::string &variable_name,
 623 const double *data) {
 624
 625
 626 auto variable = variable_info(variable_name);
 627
 628 const auto *grid = variable.grid_info();
 629
 630 // Gather the variable name into the JSON object and send it to the server for
 631 // identification of which variable to receive.
 632 {
 633 nlohmann::json info;
 634 info["file_name"] = file_name;
 635 info["variable_name"] = variable_name;
 636 info["ndims"] = variable.n_spatial_dimensions();
 637 info["time_dependent"] = variable.get_time_dependent();
 638 info["grid_name"] = details::grid_name(variable);
// NOTE(review): original line 639 is missing from this listing — presumably
// "send_action(SEND_GRIDDED_VARIABLE, info);" (SEND_GRIDDED_VARIABLE appears in
// the action enum listed below).
 640 }
 641
 642 int x_size = (int)grid->xm;
 643 int y_size = (int)grid->ym;
 644 int collection_size = std::max((int)variable.levels().size(), 1);
 645
 646 // Assemble the "send_field" argument for yac_cput():
 647 //
 648 // This method comes from examples/toy_dummy/dummy_ocean_c.c in YAC's source code tree.
 649 std::vector<double**> collection_data(collection_size, nullptr);
 650 std::vector<double*> point_set_data(collection_size, nullptr);
 651 for (int j = 0; j < collection_size; ++j) {
// Each collection entry points at its own x_size*y_size slab of m_field_buffer.
 652 point_set_data[j] = m_field_buffer + (int)(j * (x_size * y_size));
 653 collection_data[j] = &(point_set_data[j]);
 654 }
 655
 656 // Copy data from the argument array to the send_field.
 657 //
 658 // Note: YAC will automatically buffer data that is passed to yac_cput().
 659 {
 660 // PISM indexing helpers:
// In the PISM layout, consecutive levels of one (x, y) location are adjacent:
// data[(y * x_size + x) * collection_size + c].
 661 int delta_x_p = collection_size;
 662 int delta_y_p = collection_size * x_size;
 663 // YAC indexing helpers:
// YAC expects one contiguous (y-major) 2D slab per collection entry.
 664 int delta_x_y = 1;
 665 int delta_y_y = x_size;
 666 for (int c = 0; c < collection_size; c++) {
 667 for (int x = 0; x < x_size; x++) {
 668 for (int y = 0; y < y_size; y++) {
 669 int pism_index = y * delta_y_p + x * delta_x_p + c;
 670 int yac_index = y * delta_y_y + x * delta_x_y;
 671
 672 collection_data[c][0][yac_index] = data[pism_index];
 673 }
 674 }
 675 }
 676 }
 677 // Since the output interface is only called when the output is done, all calls to
 678 // yac_cput() should result in an actual data exchange.
 679 //
 680 // TODO: we can add a check to verify that the time is still below the simulation end
 681 // Since the snapshot output calls are normally equal or smaller than the number of time
 682 // steps this should work fine nonetheless
 683 int info, error;
 684 yac_cput(m_field_ids[variable_name], collection_size, collection_data.data(), &info, &error);
 685}
686
687
688} // namespace pism
A class for storing and accessing PISM configuration flags and parameters.
Definition Config.hh:56
void set_is_async(bool flag)
const std::string & time_name() const
MPI_Comm comm() const
const VariableMetadata & variable_info(const std::string &variable_name) const
static RuntimeError formatted(const ErrorLocation &location, const char format[],...) __attribute__((format(printf
build a RuntimeError with a formatted message
std::map< std::string, std::string > strings
string and boolean attributes
std::map< std::string, std::vector< double > > numbers
scalar and array attributes
const std::vector< double > & levels() const
const grid::DistributedGridInfo * grid_info() const
std::string get_name() const
std::vector< DimensionMetadata > dimensions() const
std::vector< std::string > m_text_buffers
buffers used to send text (write_text_impl())
void initialize_impl(const std::set< VariableMetadata > &array_variables)
void define_yac_field(const VariableMetadata &variable)
void set_global_attributes_impl(const std::string &file_name, const std::map< std::string, std::string > &strings, const std::map< std::string, std::vector< double > > &numbers)
void define_variable_impl(const std::string &file_name, const std::string &variable_name, const std::vector< std::string > &dims, io::Type type, const VariableAttributes &attributes)
std::map< std::string, int > m_field_ids
YAC field ID corresponding to a particular variable (by name)
std::map< std::string, int > m_variable_tags
std::vector< double * > m_buffers
buffers used to send arrays of double
void define_dimension_impl(const std::string &file_name, const std::string &name, unsigned int length)
void define_yac_grid(const VariableMetadata &variable)
void write_distributed_array_impl(const std::string &file_name, const std::string &variable_name, const double *data)
std::vector< MPI_Request > m_mpi_requests
void close_impl(const std::string &file_name)
void append_impl(const std::string &file_name)
TagTreatment
Tags for MPI messages sending non-gridded variable data.
void write_text_impl(const std::string &file_name, const std::string &variable_name, const std::vector< unsigned int > &start, const std::vector< unsigned int > &count, const std::string &input)
void write_array_impl(const std::string &file_name, const std::string &variable_name, const std::vector< unsigned int > &start, const std::vector< unsigned int > &count, const double *data)
double last_time_value_impl(const std::string &file_name)
unsigned int time_dimension_length_impl(const std::string &file_name)
void waitall()
Call MPI_Waitall() to ensure that all buffers can be freed.
std::map< std::string, int > m_time_length
Length of the time dimension in a file.
bool m_leader
True if the current MPI process is responsible for sending non-gridded data.
void append_history_impl(const std::string &file_name, const std::string &text)
YacOutputWriter(MPI_Comm comm, const Config &config)
void append_time_impl(const std::string &file_name, double time_seconds)
std::map< std::string, int > m_point_set_id
YAC point set ID corresponding to a grid name.
void sync_impl(const std::string &file_name)
std::map< std::string, std::map< std::string, bool > > m_defined_variable
void send_action(int action_id, const nlohmann::json &metadata)
std::map< std::string, std::map< std::string, bool > > m_defined_dimension
int tag(const std::string &variable_name, TagTreatment flag=GET_EXISTING_TAG)
std::map< std::string, double > m_last_time
last time value in an output file
#define PISM_ERROR_LOCATION
static std::string to_python_type(pism::io::Type input)
static void to_json(const VariableAttributes &attributes, nlohmann::json &output)
static void free_array_buffers(std::vector< double * > &buffers)
static std::string grid_name(const VariableMetadata &variable)
void compute_point_coordinates(const grid::DistributedGridInfo &grid, const std::string &proj_string, std::vector< double > &longitudes, std::vector< double > &latitudes)
static std::vector< int > patch_global_indices(unsigned int x_global_size, unsigned int x_start, unsigned int x_size, unsigned int y_start, unsigned int y_size)
@ PISM_SHORT
Definition IO_Flags.hh:50
@ PISM_FLOAT
Definition IO_Flags.hh:52
@ PISM_DOUBLE
Definition IO_Flags.hh:53
@ PISM_BYTE
Definition IO_Flags.hh:48
@ PISM_CHAR
Definition IO_Flags.hh:49
@ DEFINE_VARIABLE
@ DEFINE_YAC_GRID
@ DEFINE_YAC_FIELD
@ SET_FILE_ATTRIBUTES
@ DEFINE_DIMENSION
@ FINISH_YAC_INITIALIZATION
@ SEND_GRIDDED_VARIABLE
std::string grid_name(const pism::File &file, const std::string &variable_name, pism::units::System::Ptr sys, bool piecewise_constant)
int count
Definition test_cube.c:16