/usr/lib/petscdir/3.7.7/x86_64-linux-gnu-real-debug/include/petsc/private/sfimpl.h is in libpetsc3.7.7-dbg 3.7.7+dfsg1-2build5.
This file is owned by root:root, with mode 0o644.
The actual contents of the file can be viewed below.
#if !defined(_PETSCSFIMPL_H)
#define _PETSCSFIMPL_H
#include <petscsf.h>
#include <petsc/private/petscimpl.h>
#include <petscviewer.h>
PETSC_EXTERN PetscLogEvent PETSCSF_SetGraph, PETSCSF_BcastBegin, PETSCSF_BcastEnd, PETSCSF_ReduceBegin, PETSCSF_ReduceEnd, PETSCSF_FetchAndOpBegin, PETSCSF_FetchAndOpEnd;
struct _PetscSFOps {
  PetscErrorCode (*Reset)(PetscSF);
  PetscErrorCode (*Destroy)(PetscSF);
  PetscErrorCode (*SetUp)(PetscSF);
  PetscErrorCode (*SetFromOptions)(PetscOptionItems*,PetscSF);
  PetscErrorCode (*View)(PetscSF,PetscViewer);
  PetscErrorCode (*Duplicate)(PetscSF,PetscSFDuplicateOption,PetscSF);
  PetscErrorCode (*BcastBegin)(PetscSF,MPI_Datatype,const void*,void*);
  PetscErrorCode (*BcastEnd)(PetscSF,MPI_Datatype,const void*,void*);
  PetscErrorCode (*ReduceBegin)(PetscSF,MPI_Datatype,const void*,void*,MPI_Op);
  PetscErrorCode (*ReduceEnd)(PetscSF,MPI_Datatype,const void*,void*,MPI_Op);
  PetscErrorCode (*FetchAndOpBegin)(PetscSF,MPI_Datatype,void*,const void*,void*,MPI_Op);
  PetscErrorCode (*FetchAndOpEnd)(PetscSF,MPI_Datatype,void*,const void*,void*,MPI_Op);
};
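
/*
   Illustrative sketch, not part of the installed header, with hypothetical
   names (PetscSFBcastBegin_MySF): an implementation supplies functions
   matching the pointer types in the table above, e.g. a BcastBegin that
   starts communication of root data toward the leaves without blocking.
*/
static PetscErrorCode PetscSFBcastBegin_MySF(PetscSF sf,MPI_Datatype unit,const void *rootdata,void *leafdata)
{
  PetscFunctionBegin;
  /* post nonblocking sends/receives moving rootdata toward leafdata here;
     the matching BcastEnd entry completes them */
  PetscFunctionReturn(0);
}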
struct _p_PetscSF {
  PETSCHEADER(struct _PetscSFOps);
  PetscInt    nroots;          /* Number of root vertices on current process (candidates for incoming edges) */
  PetscInt    nleaves;         /* Number of leaf vertices on current process (this process specifies a root for each leaf) */
  PetscInt    *mine;           /* Location of leaves in leafdata arrays provided to the communication routines */
  PetscInt    *mine_alloc;     /* Allocated storage backing mine, when owned by this SF */
  PetscInt    minleaf,maxleaf; /* Smallest and largest leaf indices referenced on this process */
  PetscSFNode *remote;         /* Remote references to roots for each local leaf */
  PetscSFNode *remote_alloc;   /* Allocated storage backing remote, when owned by this SF */
  PetscInt    nranks;          /* Number of ranks owning roots connected to my leaves */
  PetscMPIInt *ranks;          /* List of ranks referenced by "remote" */
  PetscInt    *roffset;        /* Array of length nranks+1, offset in rmine/rremote for each rank */
  PetscInt    *rmine;          /* Concatenated array holding local indices referencing each remote rank */
  PetscInt    *rremote;        /* Concatenated array holding remote indices referenced for each remote rank */
  PetscBool   degreeknown;     /* The degree is already known, so it need not be recomputed */
  PetscInt    *degree;         /* Degree of each of my root vertices */
  PetscInt    *degreetmp;      /* Temporary local array for computing degree */
  PetscBool   rankorder;       /* Sort ranks for gather and scatter operations */
  MPI_Group   ingroup;         /* Group of processes connected to my roots */
  MPI_Group   outgroup;        /* Group of processes connected to my leaves */
  PetscSF     multi;           /* Internal graph used to implement gather and scatter operations */
  PetscBool   graphset;        /* Flag indicating that the graph has been set, required before calling communication routines */
  PetscBool   setupcalled;     /* Type and communication structures have been set up */
  void        *data;           /* Pointer to implementation */
};
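
/*
   Sketch of how the compressed per-rank arrays above relate (hypothetical
   helper, not part of the installed header): for each remote rank ranks[r],
   the leaf indices rmine[roffset[r]..roffset[r+1]) on this process are
   connected to the root indices rremote[roffset[r]..roffset[r+1]) there.
*/
static void PetscSFWalkRemoteRanks_Sketch(PetscSF sf)
{
  PetscInt r,i;
  for (r=0; r<sf->nranks; r++) {
    for (i=sf->roffset[r]; i<sf->roffset[r+1]; i++) {
      /* local leaf sf->rmine[i] references root sf->rremote[i] on rank sf->ranks[r] */
    }
  }
}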
PETSC_EXTERN PetscBool PetscSFRegisterAllCalled;
PETSC_EXTERN PetscErrorCode PetscSFRegisterAll(void);
PETSC_EXTERN PetscErrorCode MPIPetsc_Type_unwrap(MPI_Datatype,MPI_Datatype*,PetscBool*);
PETSC_EXTERN PetscErrorCode MPIPetsc_Type_compare(MPI_Datatype,MPI_Datatype,PetscBool*);
PETSC_EXTERN PetscErrorCode MPIPetsc_Type_compare_contig(MPI_Datatype,MPI_Datatype,PetscInt*);
#endif
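
The BcastBegin/BcastEnd pairs in the ops table back the matching public entry points declared in petscsf.h, which split communication into a start phase and a completion phase so local work can overlap it. A minimal usage sketch, assuming sf already has its graph set via PetscSFSetGraph() (BroadcastRoots is a hypothetical caller, not a PETSc function):

#include <petscsf.h>

PetscErrorCode BroadcastRoots(PetscSF sf,const PetscScalar *rootdata,PetscScalar *leafdata)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = PetscSFBcastBegin(sf,MPIU_SCALAR,rootdata,leafdata);CHKERRQ(ierr);
  /* unrelated local computation can overlap the communication here */
  ierr = PetscSFBcastEnd(sf,MPIU_SCALAR,rootdata,leafdata);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}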