#!/bin/bash
##### The following line will request 10 (virtual) nodes, each with 24 cores running 24 MPI processes,
##### a total of 240-way parallel.
#PBS -l select=10:ncpus=24:mpiprocs=24:mem=4GB
#PBS -q normal
##### Supply YOUR resource programme code in the next line
#PBS -P MECH0000
#PBS -l walltime=1:00:00
#### CFD-Ace will not work on scratch, work in the home directory:
#PBS -o /home/username/acetesting/ace.out
#PBS -e /home/username/acetesting/ace.err
##### The following two lines will send the user an e-mail when the job aborts, begins or ends.
#PBS -m abe
#PBS -M username@email.co.za
##### Set up the path. To ensure that the compute nodes also have access to this path,
##### also add these lines to your .bashrc file.
export ESI_HOME=/apps/chpc/compmech/CFD/ESI
export PATH=$ESI_HOME/2015.0/UTILS/bin:$PATH
export LD_LIBRARY_PATH=$ESI_HOME/2015.0/UTILS/lib:$LD_LIBRARY_PATH
##### Set up ssh-tunnels to your license server. Obviously use the right port numbers and server URL.
#### lmgrd daemon
ssh -f username@chpclic1 -L 1999:licenseserver.ac.za:1999 -N
#### vendor daemon port
ssh -f username@chpclic1 -L 1998:licenseserver.ac.za:1998 -N
#### Tell the solver where to look for the license.
#### localhost is correct here; it follows from the ssh-tunneling.
export LM_LICENSE_FILE=1999@localhost
#### There is no -d option available under PBS Pro, therefore
#### explicitly set the working directory and change to it.
#### CFD-Ace will not work on scratch, work in the home directory.
export PBS_JOBDIR=/home/username/acetesting
cd $PBS_JOBDIR
#### The nodefile lists one entry per MPI process, so its line count gives the process count.
nproc=$(wc -l < $PBS_NODEFILE)
#### This is a minimal run instruction;
#### it will run the solver until it reaches the stopping criterion set in the DTF file.
CFD-SOLVER --model=test01 --wd=$PBS_JOBDIR --num=$nproc --hosts=$PBS_NODEFILE --sim=1 > test01.out
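
#### Optional sanity check, shown here as comments only (not executed by the scheduler):
#### with the two tunnels up, the license status can be queried through the forwarded
#### port using FlexNet's lmutil. Whether lmutil ships in the ESI UTILS directory is an
#### assumption; if not, use any FlexNet lmutil available on the system.
####   lmutil lmstat -a -c 1999@localhost
#### If this reports the lmgrd and vendor daemons as UP, the tunnels and
#### LM_LICENSE_FILE setting are working.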
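
#### A minimal usage sketch, assuming this script is saved as ace.qsub (an illustrative
#### name, not mandated by anything above): submit it from the login node and monitor
#### the queue with standard PBS commands.
####   qsub ace.qsub
####   qstat -u username
#### Once the job runs, solver output accumulates in test01.out, and the PBS stdout and
#### stderr streams land in the ace.out and ace.err files named in the directives above.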