author    V3n3RiX <venerix@redcorelinux.org>  2020-05-14 11:09:11 +0100
committer V3n3RiX <venerix@redcorelinux.org>  2020-05-14 11:09:11 +0100
commit    deba8115d2c2af26df42966b91ef04ff4dd79cde (patch)
tree      9a48f42594e1a9e6b2020d5535a784314434d7a7 /sci-mathematics/petsc/files
parent    38423c67c8a23f6a1bc42038193182e2da3116eb (diff)
gentoo resync : 14.05.2020
Diffstat (limited to 'sci-mathematics/petsc/files')
-rw-r--r--  sci-mathematics/petsc/files/petsc-3.13.0-do_not_run_mpiexec.patch (renamed from sci-mathematics/petsc/files/petsc-3.12.1-do_not_run_mpiexec.patch)   32
-rw-r--r--  sci-mathematics/petsc/files/petsc-3.13.0-fix_sandbox_violation.patch (renamed from sci-mathematics/petsc/files/petsc-3.9.0-fix_sandbox_violation.patch)   7
-rw-r--r--  sci-mathematics/petsc/files/petsc-3.13.0-make_hypre_configure.patch (renamed from sci-mathematics/petsc/files/petsc-3.12.1-make_hypre_configure.patch)   22
3 files changed, 32 insertions, 29 deletions
diff --git a/sci-mathematics/petsc/files/petsc-3.12.1-do_not_run_mpiexec.patch b/sci-mathematics/petsc/files/petsc-3.13.0-do_not_run_mpiexec.patch
index 9c3d5047b0a6..4c46658dec71 100644
--- a/sci-mathematics/petsc/files/petsc-3.12.1-do_not_run_mpiexec.patch
+++ b/sci-mathematics/petsc/files/petsc-3.13.0-do_not_run_mpiexec.patch
@@ -1,14 +1,14 @@
diff --git a/config/BuildSystem/config/packages/MPI.py b/config/BuildSystem/config/packages/MPI.py
-index c85bb504..7720eba9 100644
+index 7e424e71..ee502bd2 100644
--- a/config/BuildSystem/config/packages/MPI.py
+++ b/config/BuildSystem/config/packages/MPI.py
-@@ -209,26 +209,11 @@ shared libraries and run with --known-mpi-shared-libraries=1')
+@@ -210,28 +210,11 @@ shared libraries and run with --known-mpi-shared-libraries=1')
raise RuntimeError('Could not locate MPIEXEC - please specify --with-mpiexec option')
# Support for spaces and () in executable names; also needs to handle optional arguments at the end
# TODO: This support for spaces and () should be moved to core BuildSystem
- self.mpiexec = self.mpiexec.replace(' ', '\\ ').replace('(', '\\(').replace(')', '\\)').replace('\ -',' -')
- if (hasattr(self, 'ompi_major_version') and int(self.ompi_major_version) >= 3):
-- (out, err, ret) = Configure.executeShellCommand(self.mpiexec+' -help all', checkCommand = noCheck, timeout = 10, log = self.log)
+- (out, err, ret) = Configure.executeShellCommand(self.mpiexec+' -help all', checkCommand = noCheck, timeout = 60, log = self.log, threads = 1)
- if out.find('--oversubscribe') >=0:
- self.mpiexec = self.mpiexec + ' --oversubscribe'
+ self.mpiexec = self.mpiexec + ' --oversubscribe'
@@ -16,17 +16,19 @@ index c85bb504..7720eba9 100644
# using mpiexec environmental variables make sure mpiexec matches the MPI libraries and save the variables for testing in PetscInitialize()
# the variable HAVE_MPIEXEC_ENVIRONMENTAL_VARIABLE is not currently used. PetscInitialize() can check the existence of the environmental variable to
# determine if the program has been started with the correct mpiexec (will only be set for parallel runs so not clear how to check appropriately)
-- (out, err, ret) = Configure.executeShellCommand(self.mpiexec+' -n 1 printenv', checkCommand = noCheck, timeout = 10, log = self.log)
-- if ret: raise RuntimeError('Unable to run '+self.mpiexec+' with option "-n 1"\n'+err)
-- if out.find('MPIR_CVAR_CH3') > -1:
-- if hasattr(self,'ompi_major_version'): raise RuntimeError("Your libraries are from OpenMPI but it appears your mpiexec is from MPICH");
-- self.addDefine('HAVE_MPIEXEC_ENVIRONMENTAL_VARIABLE', 'MPIR_CVAR_CH3')
-- elif out.find('MPIR_CVAR_CH3') > -1:
-- if hasattr(self,'ompi_major_version'): raise RuntimeError("Your libraries are from OpenMPI but it appears your mpiexec is from MPICH");
-- self.addDefine('HAVE_MPIEXEC_ENVIRONMENTAL_VARIABLE', 'MPICH')
-- elif out.find('OMPI_COMM_WORLD_SIZE') > -1:
-- if hasattr(self,'mpich_numversion'): raise RuntimeError("Your libraries are from MPICH but it appears your mpiexec is from OpenMPI");
-- self.addDefine('HAVE_MPIEXEC_ENVIRONMENTAL_VARIABLE', 'OMP')
+- (out, err, ret) = Configure.executeShellCommand(self.mpiexec+' -n 1 printenv', checkCommand = noCheck, timeout = 60, threads = 1, log = self.log)
+- if ret:
+- self.logWrite('Unable to run '+self.mpiexec+' with option "-n 1 printenv"\nThis could be ok, some MPI implementations such as SGI produce a non-zero status with non-MPI programs\n'+out+err)
+- else:
+- if out.find('MPIR_CVAR_CH3') > -1:
+- if hasattr(self,'ompi_major_version'): raise RuntimeError("Your libraries are from OpenMPI but it appears your mpiexec is from MPICH");
+- self.addDefine('HAVE_MPIEXEC_ENVIRONMENTAL_VARIABLE', 'MPIR_CVAR_CH3')
+- elif out.find('MPIR_CVAR_CH3') > -1:
+- if hasattr(self,'ompi_major_version'): raise RuntimeError("Your libraries are from OpenMPI but it appears your mpiexec is from MPICH");
+- self.addDefine('HAVE_MPIEXEC_ENVIRONMENTAL_VARIABLE', 'MPICH')
+- elif out.find('OMPI_COMM_WORLD_SIZE') > -1:
+- if hasattr(self,'mpich_numversion'): raise RuntimeError("Your libraries are from MPICH but it appears your mpiexec is from OpenMPI");
+- self.addDefine('HAVE_MPIEXEC_ENVIRONMENTAL_VARIABLE', 'OMP')
+
self.addMakeMacro('MPIEXEC', self.mpiexec)
self.mpiexec = self.mpiexec + ' -n 1'
-
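Read together, the hunks above show that the renamed do_not_run_mpiexec patch stops PETSc's configure from executing mpiexec inside the Portage sandbox: the `mpiexec -help all` and `mpiexec -n 1 printenv` probes are deleted, and `--oversubscribe` appears to be appended unconditionally. A minimal sketch of what the patched block in config/BuildSystem/config/packages/MPI.py is left doing; the helper function below is hypothetical and stands in for code that really operates on self.mpiexec:

def build_mpiexec_command(mpiexec):
    # No shell probes remain after the patch, only string assembly of the launcher.
    mpiexec = mpiexec + ' --oversubscribe'   # appended without the `mpiexec -help all` check
    make_macro = mpiexec                     # recorded upstream via self.addMakeMacro('MPIEXEC', self.mpiexec)
    single_rank = mpiexec + ' -n 1'          # the `-n 1` form kept at the end of the hunk
    return make_macro, single_rank

# Hypothetical launcher path, purely for illustration.
print(build_mpiexec_command('/usr/bin/mpiexec'))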
diff --git a/sci-mathematics/petsc/files/petsc-3.9.0-fix_sandbox_violation.patch b/sci-mathematics/petsc/files/petsc-3.13.0-fix_sandbox_violation.patch
index 97f8dfed4d71..c0dfd53a0112 100644
--- a/sci-mathematics/petsc/files/petsc-3.9.0-fix_sandbox_violation.patch
+++ b/sci-mathematics/petsc/files/petsc-3.13.0-fix_sandbox_violation.patch
@@ -1,15 +1,16 @@
diff --git a/config/PETSc/options/installDir.py b/config/PETSc/options/installDir.py
-index 92f190d..047c85b 100644
+index 860faf0f..87898811 100755
--- a/config/PETSc/options/installDir.py
+++ b/config/PETSc/options/installDir.py
-@@ -41,12 +41,6 @@ class Configure(config.base.Configure):
+@@ -40,13 +40,6 @@ class Configure(config.base.Configure):
self.dir = os.path.abspath(os.path.expanduser(self.framework.argDB['prefix']))
self.petscDir = self.dir
self.petscArch = ''
- try:
- os.makedirs(os.path.join(self.dir,'PETScTestDirectory'))
- os.rmdir(os.path.join(self.dir,'PETScTestDirectory'))
-- except:
+- except Exception as e:
+- self.logPrint('Error trying to to test write permissions on directory '+str(e))
- self.installSudoMessage = 'You do not have write permissions to the --prefix directory '+self.dir+'\nYou will be prompted for the sudo password for any external package installs'
- self.installSudo = 'sudo '
else:
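The fix_sandbox_violation patch above deletes the try/except block that probed write access to the --prefix directory by creating and removing PETScTestDirectory; touching a path outside the build root during src_configure is exactly what Portage's sandbox rejects. A standalone sketch of the removed probe, with a hypothetical prefix value (in PETSc this is self.dir inside config/PETSc/options/installDir.py):

import os

prefix = '/usr'  # hypothetical --prefix; under Portage this lies outside the sandbox's writable set
probe = os.path.join(prefix, 'PETScTestDirectory')
try:
    # This mkdir/rmdir round trip is the sandbox violation the patch removes.
    os.makedirs(probe)
    os.rmdir(probe)
    print('prefix is writable; no sudo fallback needed')
except Exception as exc:
    # Upstream would log the error and switch external package installs to sudo.
    print('prefix not writable ({}); upstream falls back to sudo installs'.format(exc))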
diff --git a/sci-mathematics/petsc/files/petsc-3.12.1-make_hypre_configure.patch b/sci-mathematics/petsc/files/petsc-3.13.0-make_hypre_configure.patch
index 9460d57e4617..1453fd519e81 100644
--- a/sci-mathematics/petsc/files/petsc-3.12.1-make_hypre_configure.patch
+++ b/sci-mathematics/petsc/files/petsc-3.13.0-make_hypre_configure.patch
@@ -1,18 +1,18 @@
diff --git a/config/BuildSystem/config/packages/hypre.py b/config/BuildSystem/config/packages/hypre.py
-index 869a1661..76067c8d 100644
+index 4d915c31..1b05a1ee 100644
--- a/config/BuildSystem/config/packages/hypre.py
+++ b/config/BuildSystem/config/packages/hypre.py
@@ -5,10 +5,6 @@ class Configure(config.package.GNUPackage):
def __init__(self, framework):
config.package.GNUPackage.__init__(self, framework)
- self.version = '2.18.1'
+ #self.version = '2.18.2'
- self.minversion = '2.14'
- self.versionname = 'HYPRE_RELEASE_VERSION'
- self.versioninclude = 'HYPRE_config.h'
- self.requiresversion = 1
- self.gitcommit = 'v'+self.version
+ #self.gitcommit = 'v'+self.version
+ self.gitcommit = '93baaa8c9' # v2.18.2+valgrind-fix
self.download = ['git://https://github.com/hypre-space/hypre','https://github.com/hypre-space/hypre/archive/'+self.gitcommit+'.tar.gz']
- self.functions = ['HYPRE_IJMatrixCreate']
diff --git a/include/petsc/private/petschypre.h b/include/petsc/private/petschypre.h
index 81ca7136..b403e70e 100644
--- a/include/petsc/private/petschypre.h
@@ -31,7 +31,7 @@ index 81ca7136..b403e70e 100644
With scalar type == real, HYPRE_Complex == PetscScalar;
With scalar type == complex, HYPRE_Complex is double __complex__ while PetscScalar may be std::complex<double>
diff --git a/src/mat/impls/hypre/mhypre.c b/src/mat/impls/hypre/mhypre.c
-index 8e8fe678..3533b99a 100644
+index 0f7470e6..cce3e2b0 100644
--- a/src/mat/impls/hypre/mhypre.c
+++ b/src/mat/impls/hypre/mhypre.c
@@ -15,10 +15,6 @@
@@ -42,10 +42,10 @@ index 8e8fe678..3533b99a 100644
-#define hypre_ParCSRMatrixClone(A,B) hypre_ParCSRMatrixCompleteClone(A)
-#endif
-
- PETSC_INTERN PetscErrorCode MatPtAP_IS_XAIJ(Mat,Mat,MatReuse,PetscReal,Mat*);
-
static PetscErrorCode MatHYPRE_CreateFromMat(Mat,Mat_HYPRE*);
-@@ -65,7 +61,6 @@ static PetscErrorCode MatHYPRE_IJMatrixPreallocate(Mat A_d, Mat A_o, HYPRE_IJMat
+ static PetscErrorCode MatHYPRE_IJMatrixPreallocate(Mat,Mat,HYPRE_IJMatrix);
+ static PetscErrorCode MatHYPRE_IJMatrixFastCopy_MPIAIJ(Mat,HYPRE_IJMatrix);
+@@ -63,7 +59,6 @@ static PetscErrorCode MatHYPRE_IJMatrixPreallocate(Mat A_d, Mat A_o, HYPRE_IJMat
nnz_o[i] = 0;
}
}
@@ -53,7 +53,7 @@ index 8e8fe678..3533b99a 100644
{ /* If we don't do this, the columns of the matrix will be all zeros! */
hypre_AuxParCSRMatrix *aux_matrix;
aux_matrix = (hypre_AuxParCSRMatrix*)hypre_IJMatrixTranslator(ij);
-@@ -75,9 +70,6 @@ static PetscErrorCode MatHYPRE_IJMatrixPreallocate(Mat A_d, Mat A_o, HYPRE_IJMat
+@@ -73,9 +68,6 @@ static PetscErrorCode MatHYPRE_IJMatrixPreallocate(Mat A_d, Mat A_o, HYPRE_IJMat
aux_matrix = (hypre_AuxParCSRMatrix*)hypre_IJMatrixTranslator(ij);
hypre_AuxParCSRMatrixNeedAux(aux_matrix) = 1;
}
@@ -63,7 +63,7 @@ index 8e8fe678..3533b99a 100644
ierr = PetscFree(nnz_d);CHKERRQ(ierr);
ierr = PetscFree(nnz_o);CHKERRQ(ierr);
}
-@@ -235,11 +227,7 @@ static PetscErrorCode MatHYPRE_IJMatrixFastCopy_MPIAIJ(Mat A, HYPRE_IJMatrix ij)
+@@ -233,11 +225,7 @@ static PetscErrorCode MatHYPRE_IJMatrixFastCopy_MPIAIJ(Mat A, HYPRE_IJMatrix ij)
/* need to shift the diag column indices (hdiag->j) back to global numbering since hypre is expecting this */
hjj = hdiag->j;
pjj = pdiag->j;
@@ -75,7 +75,7 @@ index 8e8fe678..3533b99a 100644
ierr = PetscArraycpy(hdiag->data,pdiag->a,pdiag->nz);CHKERRQ(ierr);
if (sameint) {
ierr = PetscArraycpy(hoffd->i,poffd->i,pA->A->rmap->n + 1);CHKERRQ(ierr);
-@@ -249,12 +237,8 @@ static PetscErrorCode MatHYPRE_IJMatrixFastCopy_MPIAIJ(Mat A, HYPRE_IJMatrix ij)
+@@ -247,12 +235,8 @@ static PetscErrorCode MatHYPRE_IJMatrixFastCopy_MPIAIJ(Mat A, HYPRE_IJMatrix ij)
/* need to move the offd column indices (hoffd->j) back to global numbering since hypre is expecting this
If we hacked a hypre a bit more we might be able to avoid this step */
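The make_hypre_configure patch pins hypre to a fixed git commit (93baaa8c9, i.e. v2.18.2 plus a valgrind fix) instead of deriving the download from a release tag, while the mhypre.c hunks above drop a version-compatibility #define block. An approximate sketch of the patched __init__ in config/BuildSystem/config/packages/hypre.py, reconstructed from the hunk above; the collapsed whitespace in this diff-of-diffs makes the surrounding lines an assumption, and config.package only exists inside PETSc's BuildSystem tree:

import config.package

class Configure(config.package.GNUPackage):
    def __init__(self, framework):
        config.package.GNUPackage.__init__(self, framework)
        #self.version   = '2.18.2'
        #self.gitcommit = 'v'+self.version
        self.gitcommit = '93baaa8c9'  # v2.18.2+valgrind-fix, a pinned commit rather than a tag
        self.download  = ['git://https://github.com/hypre-space/hypre',
                          'https://github.com/hypre-space/hypre/archive/'+self.gitcommit+'.tar.gz']
        self.functions = ['HYPRE_IJMatrixCreate']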