module: loaded site/slurm
module: loaded site/tmpdir
module: loaded site/hummel
module: loaded env/system-gcc
RRZ: re-using existing module setup (RRZ_SW_INIT present)
module: loaded molpro/2015.1.0
+ export HOME=/work/fcda023
+ HOME=/work/fcda023
+ cd /work/fcda023/rad
++ which molpro
+ /bin/sh -x /sw/app/molpro/2015.1.0/molprop_2015_1_linux_x86_64_i8/bin/molpro a2_nevpt2.mop
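
[note] The wrapper found by "which molpro" is itself a shell script, re-run
here under "sh -x": lines prefixed "+" are commands executed by the wrapper,
"++" are command substitutions. A minimal sketch to reproduce such a trace,
assuming the same module environment:

    sh -x "$(which molpro)" a2_nevpt2.mop
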
+ MOLPRO_PREFIX=/sw/app/molpro/2015.1.0/molprop_2015_1_linux_x86_64_i8
+ LAUNCHER='/sw/app/molpro/2015.1.0/molprop_2015_1_linux_x86_64_i8/bin/mpiexec.hydra %x'
+ HOSTFILE_FORMAT=%N
+ envlist=LD_LIBRARY_PATH
+ exe=/sw/app/molpro/2015.1.0/molprop_2015_1_linux_x86_64_i8/bin/molpro.exe
+ USER=fcda023
++ hostname
+ '[' xnode396 '!=' x ']'
++ hostname
+ HOST=node396
+ AIXTHREAD_SCOPE=s
+ '[' x '!=' x ']'
+ hostfile=/scratch/fcda023.31804/procgrp.73647
+ MOLPRO_OPTIONS_FILE=
+ MOLPRO_NOARG=1
+ logfile=1
+ MP_NODES=0
+ MP_PROCS=1
+ '[' x '!=' x ']'
+ '[' x '!=' x ']'
+ '[' x '!=' x ']'
+ '[' x '!=' x ']'
+ '[' x '!=' x ']'
+ '[' x '!=' x ']'
+ nodefile=
+ nodefile=/tmp/molpro.nodefile.31804.73647
+ srun -l /bin/hostname
+ sort -n
+ awk '{print $2}'
+ nodelist=
+ NODELIST=
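
[note] A sketch of the nodefile step as reconstructed from the trace (the
redirection into $nodefile is implied; "sh -x" does not display redirections):

    nodefile=/tmp/molpro.nodefile.31804.73647
    srun -l /bin/hostname | sort -n | awk '{print $2}' > "$nodefile"

"srun -l" labels each task's output with its task number, so sorting
numerically and printing the second field yields one hostname per allocated
task -- here 16 lines, all reading node396.
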
+ OMP_NUM_THREADS=1
+ proc=0
+ RT_GRQ=ON
+ verbose=0
+ envlist='LD_LIBRARY_PATH AIXTHREAD_SCOPE MOLPRO_PREFIX MP_NODES MP_PROCS MP_TASKS_PER_NODE MOLPRO_NOARG MOLPRO_OPTIONS MOLPRO_OPTIONS_FILE MPI_MAX_CLUSTER_SIZE MV2_ENABLE_AFFINITY RT_GRQ TMPDIR XLSMPOPTS'
+ test x '!=' x
+ database=0
+ test xa2_nevpt2.mop '!=' x
+ case $1 in
+ MOLPRO_OPTIONS=' a2_nevpt2.mop'
+ shift
+ test x '!=' x
++ echo /sw/app/molpro/2015.1.0/molprop_2015_1_linux_x86_64_i8/bin/molpro.exe
++ sed -e 's/\/[^\/]*$//g'
+ export PATH=/sw/app/molpro/2015.1.0/molprop_2015_1_linux_x86_64_i8/bin:/sw/app/molpro/2015.1.0/molprop_2015_1_linux_x86_64_i8/bin:/sw/batch/slurm/14.11.8/bin:/sw/rrz/bin:/usr/local/bin:/bin:/usr/bin:/usr/local/sbin:/usr/sbin
+ PATH=/sw/app/molpro/2015.1.0/molprop_2015_1_linux_x86_64_i8/bin:/sw/app/molpro/2015.1.0/molprop_2015_1_linux_x86_64_i8/bin:/sw/batch/slurm/14.11.8/bin:/sw/rrz/bin:/usr/local/bin:/bin:/usr/bin:/usr/local/sbin:/usr/sbin
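
[note] The bin directory is derived from $exe with a sed-based dirname and
prepended to PATH; a sketch:

    bindir=$(echo "$exe" | sed -e 's/\/[^\/]*$//g')   # strip last /component
    export PATH="$bindir:$PATH"

The directory was evidently already on PATH (presumably from the module load),
which is why it appears twice at the front of the exported PATH above.
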
+ '[' 'x/sw/app/molpro/2015.1.0/molprop_2015_1_linux_x86_64_i8/bin/mpiexec.hydra %x' = x ']'
+ mode=parallel
+ '[' x0 = x1 ']'
+ MP_PROCS=1
+ '[' x '!=' x ']'
+ '[' x '!=' x ']'
+ XLSMPOPTS=parthds=1
+ '[' x = x ']'
++ echo /sw/app/molpro/2015.1.0/molprop_2015_1_linux_x86_64_i8/bin/mpiexec.hydra %x
++ sed -e 's/ -npernode %p//'
+ LAUNCHER='/sw/app/molpro/2015.1.0/molprop_2015_1_linux_x86_64_i8/bin/mpiexec.hydra %x'
+ '[' x%N '!=' x ']'
++ echo %N
++ sed -e 's/%d/$working_dir/g'
+ HOSTFILE_FORMAT=%N
++ echo %N
++ sed -e 's/%N/$hostname/g'
+ HOSTFILE_FORMAT='$hostname'
++ echo '$hostname'
++ sed -e s/%n/1/g
+ HOSTFILE_FORMAT='$hostname'
++ echo '$hostname'
++ sed -e 's/%u/$user/g'
+ HOSTFILE_FORMAT='$hostname'
++ echo '$hostname'
++ sed -e 's/%x/$exe/g'
+ HOSTFILE_FORMAT='$hostname'
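
[note] Condensed, the placeholder expansion above is equivalent to the
following sketch (the script applies one echo|sed pass per placeholder):

    HOSTFILE_FORMAT=$(echo "$HOSTFILE_FORMAT" | sed \
        -e 's/%d/$working_dir/g' \
        -e 's/%N/$hostname/g' \
        -e 's/%n/1/g' \
        -e 's/%u/$user/g' \
        -e 's/%x/$exe/g')

Only %N matches the template "%N", leaving the literal string '$hostname' to
be eval-expanded once per nodefile line below.
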
+ '[' x0 = x1 ']'
++ pwd
+ working_dir=/work/fcda023/rad
+ rm -f /scratch/fcda023.31804/procgrp.73647
+ '[' -r /tmp/molpro.nodefile.31804.73647 ']'
+ user=fcda023
+ nodes_used=0
+ cat /tmp/molpro.nodefile.31804.73647
+ read line
++ echo node396
++ sed -e 's/ .*$//g'
+ hostname=node396
++ echo node396
++ sed -e 's/^[^ ]* *//g' -e 's/ .*$//g'
+ count=
+ '[' x = x ']'
+ count=1
+ '[' 1 -gt 0 ']'
++ expr 1 - 1
+ count=0
++ expr 0 + 1
+ nodes_used=1
+ '[' x '!=' x ']'
+ eval echo '$hostname'
++ echo node396
+ '[' 0 -gt 0 ']'
[... 15 further identical loop iterations omitted: every remaining nodefile
line is also node396, so each pass sets hostname=node396, count=1, echoes
node396, and increments nodes_used from 2 up to 16 ...]
+ read line
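
[note] The sixteen iterations above all execute the same body. A sketch of the
loop as reconstructed from the trace (names taken from the trace; collecting
the echoed hostnames into $hostfile is an assumption, since "sh -x" does not
show redirections):

    nodes_used=0
    cat /tmp/molpro.nodefile.31804.73647 | while read line; do
        hostname=`echo "$line" | sed -e 's/ .*$//g'`                  # field 1
        count=`echo "$line" | sed -e 's/^[^ ]* *//g' -e 's/ .*$//g'`  # field 2
        [ "x$count" = x ] && count=1       # a bare hostname implies count=1
        while [ "$count" -gt 0 ]; do
            count=`expr "$count" - 1`
            nodes_used=`expr "$nodes_used" + 1`
            eval echo "$HOSTFILE_FORMAT"   # '$hostname' -> node396
        done
    done                                   # > $hostfile (assumed)

Note that nodes_used ends up counting tasks (16), not distinct nodes (1),
because every nodefile line names node396.
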
++ cat /scratch/fcda023.31804/procgrp.73647
++ wc -l
+ MP_PROCS=16
+ '[' x0 = x1 ']'
++ echo /sw/app/molpro/2015.1.0/molprop_2015_1_linux_x86_64_i8/bin/mpiexec.hydra %x
++ sed -e 's/%x/$exe/g'
+ LAUNCHER='/sw/app/molpro/2015.1.0/molprop_2015_1_linux_x86_64_i8/bin/mpiexec.hydra $exe'
++ echo /sw/app/molpro/2015.1.0/molprop_2015_1_linux_x86_64_i8/bin/mpiexec.hydra '$exe'
++ sed -e 's/%h/$hostfile/g'
+ LAUNCHER='/sw/app/molpro/2015.1.0/molprop_2015_1_linux_x86_64_i8/bin/mpiexec.hydra $exe'
++ echo /sw/app/molpro/2015.1.0/molprop_2015_1_linux_x86_64_i8/bin/mpiexec.hydra '$exe'
++ sed -e 's/%n/$MP_PROCS/g'
+ LAUNCHER='/sw/app/molpro/2015.1.0/molprop_2015_1_linux_x86_64_i8/bin/mpiexec.hydra $exe'
++ echo /sw/app/molpro/2015.1.0/molprop_2015_1_linux_x86_64_i8/bin/mpiexec.hydra '$exe'
++ sed -e 's/%p/$tasks_per_node/g'
+ LAUNCHER='/sw/app/molpro/2015.1.0/molprop_2015_1_linux_x86_64_i8/bin/mpiexec.hydra $exe'
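
[note] The launcher template is rewritten with the same echo|sed pattern; a
condensed sketch:

    for s in 's/%x/$exe/g' 's/%h/$hostfile/g' \
             's/%n/$MP_PROCS/g' 's/%p/$tasks_per_node/g'; do
        LAUNCHER=$(echo "$LAUNCHER" | sed -e "$s")
    done

This template only contains %x, so LAUNCHER becomes ".../mpiexec.hydra $exe"
and the other three passes are no-ops.
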
+ for i in '$envlist'
+ eval 'j=$LD_LIBRARY_PATH'
++ j=
+ '[' x '!=' x ']'
+ for i in '$envlist'
+ eval 'j=$AIXTHREAD_SCOPE'
++ j=s
+ '[' xs '!=' x ']'
++ echo /sw/app/molpro/2015.1.0/molprop_2015_1_linux_x86_64_i8/bin/mpiexec.hydra '$exe'
++ sed -e 's/%e/AIXTHREAD_SCOPE=$AIXTHREAD_SCOPE %e/g'
+ LAUNCHER='/sw/app/molpro/2015.1.0/molprop_2015_1_linux_x86_64_i8/bin/mpiexec.hydra $exe'
+ for i in '$envlist'
+ eval 'j=$MOLPRO_PREFIX'
++ j=/sw/app/molpro/2015.1.0/molprop_2015_1_linux_x86_64_i8
+ '[' x/sw/app/molpro/2015.1.0/molprop_2015_1_linux_x86_64_i8 '!=' x ']'
++ echo /sw/app/molpro/2015.1.0/molprop_2015_1_linux_x86_64_i8/bin/mpiexec.hydra '$exe'
++ sed -e 's/%e/MOLPRO_PREFIX=$MOLPRO_PREFIX %e/g'
+ LAUNCHER='/sw/app/molpro/2015.1.0/molprop_2015_1_linux_x86_64_i8/bin/mpiexec.hydra $exe'
+ for i in '$envlist'
+ eval 'j=$MP_NODES'
++ j=0
+ '[' x0 '!=' x ']'
++ echo /sw/app/molpro/2015.1.0/molprop_2015_1_linux_x86_64_i8/bin/mpiexec.hydra '$exe'
++ sed -e 's/%e/MP_NODES=$MP_NODES %e/g'
+ LAUNCHER='/sw/app/molpro/2015.1.0/molprop_2015_1_linux_x86_64_i8/bin/mpiexec.hydra $exe'
+ for i in '$envlist'
+ eval 'j=$MP_PROCS'
++ j=16
+ '[' x16 '!=' x ']'
++ echo /sw/app/molpro/2015.1.0/molprop_2015_1_linux_x86_64_i8/bin/mpiexec.hydra '$exe'
++ sed -e 's/%e/MP_PROCS=$MP_PROCS %e/g'
+ LAUNCHER='/sw/app/molpro/2015.1.0/molprop_2015_1_linux_x86_64_i8/bin/mpiexec.hydra $exe'
+ for i in '$envlist'
+ eval 'j=$MP_TASKS_PER_NODE'
++ j=
+ '[' x '!=' x ']'
+ for i in '$envlist'
+ eval 'j=$MOLPRO_NOARG'
++ j=1
+ '[' x1 '!=' x ']'
++ echo /sw/app/molpro/2015.1.0/molprop_2015_1_linux_x86_64_i8/bin/mpiexec.hydra '$exe'
++ sed -e 's/%e/MOLPRO_NOARG=$MOLPRO_NOARG %e/g'
+ LAUNCHER='/sw/app/molpro/2015.1.0/molprop_2015_1_linux_x86_64_i8/bin/mpiexec.hydra $exe'
+ for i in '$envlist'
+ eval 'j=$MOLPRO_OPTIONS'
++ j=' a2_nevpt2.mop'
+ '[' 'x a2_nevpt2.mop' '!=' x ']'
++ echo /sw/app/molpro/2015.1.0/molprop_2015_1_linux_x86_64_i8/bin/mpiexec.hydra '$exe'
++ sed -e 's/%e/MOLPRO_OPTIONS=$MOLPRO_OPTIONS %e/g'
+ LAUNCHER='/sw/app/molpro/2015.1.0/molprop_2015_1_linux_x86_64_i8/bin/mpiexec.hydra $exe'
+ for i in '$envlist'
+ eval 'j=$MOLPRO_OPTIONS_FILE'
++ j=
+ '[' x '!=' x ']'
+ for i in '$envlist'
+ eval 'j=$MPI_MAX_CLUSTER_SIZE'
++ j=
+ '[' x '!=' x ']'
+ for i in '$envlist'
+ eval 'j=$MV2_ENABLE_AFFINITY'
++ j=
+ '[' x '!=' x ']'
+ for i in '$envlist'
+ eval 'j=$RT_GRQ'
++ j=ON
+ '[' xON '!=' x ']'
++ echo /sw/app/molpro/2015.1.0/molprop_2015_1_linux_x86_64_i8/bin/mpiexec.hydra '$exe'
++ sed -e 's/%e/RT_GRQ=$RT_GRQ %e/g'
+ LAUNCHER='/sw/app/molpro/2015.1.0/molprop_2015_1_linux_x86_64_i8/bin/mpiexec.hydra $exe'
+ for i in '$envlist'
+ eval 'j=$TMPDIR'
++ j=/scratch/fcda023.31804
+ '[' x/scratch/fcda023.31804 '!=' x ']'
++ echo /sw/app/molpro/2015.1.0/molprop_2015_1_linux_x86_64_i8/bin/mpiexec.hydra '$exe'
++ sed -e 's/%e/TMPDIR=$TMPDIR %e/g'
+ LAUNCHER='/sw/app/molpro/2015.1.0/molprop_2015_1_linux_x86_64_i8/bin/mpiexec.hydra $exe'
+ for i in '$envlist'
+ eval 'j=$XLSMPOPTS'
++ j=parthds=1
+ '[' xparthds=1 '!=' x ']'
++ echo /sw/app/molpro/2015.1.0/molprop_2015_1_linux_x86_64_i8/bin/mpiexec.hydra '$exe'
++ sed -e 's/%e/XLSMPOPTS=$XLSMPOPTS %e/g'
+ LAUNCHER='/sw/app/molpro/2015.1.0/molprop_2015_1_linux_x86_64_i8/bin/mpiexec.hydra $exe'
++ echo /sw/app/molpro/2015.1.0/molprop_2015_1_linux_x86_64_i8/bin/mpiexec.hydra '$exe'
++ sed -e s/%e//g
+ LAUNCHER='/sw/app/molpro/2015.1.0/molprop_2015_1_linux_x86_64_i8/bin/mpiexec.hydra $exe'
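
[note] Each set variable in $envlist is offered to the launcher via a %e
placeholder: "%e" would expand to "VAR=value %e", keeping the placeholder
alive for the next variable, and a final pass strips whatever is left. A
sketch:

    for i in $envlist; do
        eval j=\$$i                        # value of the variable named by $i
        [ "x$j" != x ] && \
            LAUNCHER=$(echo "$LAUNCHER" | sed -e "s/%e/$i=\$$i %e/g")
    done
    LAUNCHER=$(echo "$LAUNCHER" | sed -e 's/%e//g')   # strip any leftover %e

Since this mpiexec.hydra template contains no %e, every substitution above is
a no-op.
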
+ for i in '$envlist'
+ eval 'j=$LD_LIBRARY_PATH'
++ j=
+ '[' x '!=' x ']'
+ '[' x0 = x1 ']'
+ for i in '$envlist'
+ eval 'j=$AIXTHREAD_SCOPE'
++ j=s
+ '[' xs '!=' x ']'
+ export AIXTHREAD_SCOPE
+ '[' x0 = x1 ']'
+ for i in '$envlist'
+ eval 'j=$MOLPRO_PREFIX'
++ j=/sw/app/molpro/2015.1.0/molprop_2015_1_linux_x86_64_i8
+ '[' x/sw/app/molpro/2015.1.0/molprop_2015_1_linux_x86_64_i8 '!=' x ']'
+ export MOLPRO_PREFIX
+ '[' x0 = x1 ']'
+ for i in '$envlist'
+ eval 'j=$MP_NODES'
++ j=0
+ '[' x0 '!=' x ']'
+ export MP_NODES
+ '[' x0 = x1 ']'
+ for i in '$envlist'
+ eval 'j=$MP_PROCS'
++ j=16
+ '[' x16 '!=' x ']'
+ export MP_PROCS
+ '[' x0 = x1 ']'
+ for i in '$envlist'
+ eval 'j=$MP_TASKS_PER_NODE'
++ j=
+ '[' x '!=' x ']'
+ '[' x0 = x1 ']'
+ for i in '$envlist'
+ eval 'j=$MOLPRO_NOARG'
++ j=1
+ '[' x1 '!=' x ']'
+ export MOLPRO_NOARG
+ '[' x0 = x1 ']'
+ for i in '$envlist'
+ eval 'j=$MOLPRO_OPTIONS'
++ j=' a2_nevpt2.mop'
+ '[' 'x a2_nevpt2.mop' '!=' x ']'
+ export MOLPRO_OPTIONS
+ '[' x0 = x1 ']'
+ for i in '$envlist'
+ eval 'j=$MOLPRO_OPTIONS_FILE'
++ j=
+ '[' x '!=' x ']'
+ '[' x0 = x1 ']'
+ for i in '$envlist'
+ eval 'j=$MPI_MAX_CLUSTER_SIZE'
++ j=
+ '[' x '!=' x ']'
+ '[' x0 = x1 ']'
+ for i in '$envlist'
+ eval 'j=$MV2_ENABLE_AFFINITY'
++ j=
+ '[' x '!=' x ']'
+ '[' x0 = x1 ']'
+ for i in '$envlist'
+ eval 'j=$RT_GRQ'
++ j=ON
+ '[' xON '!=' x ']'
+ export RT_GRQ
+ '[' x0 = x1 ']'
+ for i in '$envlist'
+ eval 'j=$TMPDIR'
++ j=/scratch/fcda023.31804
+ '[' x/scratch/fcda023.31804 '!=' x ']'
+ export TMPDIR
+ '[' x0 = x1 ']'
+ for i in '$envlist'
+ eval 'j=$XLSMPOPTS'
++ j=parthds=1
+ '[' xparthds=1 '!=' x ']'
+ export XLSMPOPTS
+ '[' x0 = x1 ']'
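
[note] The second pass over $envlist simply exports whichever of those
variables are set; a sketch:

    for i in $envlist; do
        eval j=\$$i
        [ "x$j" != x ] && export $i
    done

This is what hands MP_PROCS=16, TMPDIR, MOLPRO_OPTIONS, and the rest to the
mpiexec.hydra child started below.
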
+ '[' x1 '!=' x1 ']'
++ eval echo /sw/app/molpro/2015.1.0/molprop_2015_1_linux_x86_64_i8/bin/mpiexec.hydra '$exe'
+++ echo /sw/app/molpro/2015.1.0/molprop_2015_1_linux_x86_64_i8/bin/mpiexec.hydra /sw/app/molpro/2015.1.0/molprop_2015_1_linux_x86_64_i8/bin/molpro.exe
+ LAUNCHER='/sw/app/molpro/2015.1.0/molprop_2015_1_linux_x86_64_i8/bin/mpiexec.hydra /sw/app/molpro/2015.1.0/molprop_2015_1_linux_x86_64_i8/bin/molpro.exe  a2_nevpt2.mop'
+ '[' x0 = x1 ']'
+ '[' x '!=' x ']'
+ '[' xparallel = xserial ']'
++ uname -s
+ '[' xLinux = xDarwin ']'
+ '[' -t 0 ']'
+ eval /sw/app/molpro/2015.1.0/molprop_2015_1_linux_x86_64_i8/bin/mpiexec.hydra /sw/app/molpro/2015.1.0/molprop_2015_1_linux_x86_64_i8/bin/molpro.exe a2_nevpt2.mop
++ /sw/app/molpro/2015.1.0/molprop_2015_1_linux_x86_64_i8/bin/mpiexec.hydra /sw/app/molpro/2015.1.0/molprop_2015_1_linux_x86_64_i8/bin/molpro.exe a2_nevpt2.mop
 arg of npos too large
[... the message above appears 16 times in total, once per MPI rank,
interleaved with the abort output below ...]

 GLOBAL ERROR fehler on processor   1
1:fehler:Received an Error in Communication
application called MPI_Abort(comm=0x84000002, 1) - process 1

[... matching "GLOBAL ERROR fehler" / MPI_Abort blocks follow for all 16 MPI
ranks (0-15); rank 0 reports comm=0x84000004, all other ranks comm=0x84000002
("fehler" is German for "error") ...]

===================================================================================
=   BAD TERMINATION OF ONE OF YOUR APPLICATION PROCESSES
=   PID 73905 RUNNING AT node396
=   EXIT CODE: 1
=   CLEANING UP REMAINING PROCESSES
=   YOU CAN IGNORE THE BELOW CLEANUP MESSAGES
===================================================================================
+ rc=1
+ '[' x '!=' x ']'
+ rm -f /scratch/fcda023.31804/procgrp.73647
+ '[' x '!=' x ']'
+ exit 1
