#!/bin/bash
# Parameter sweep: for each trace length, step short/long/ratio through a
# grid and run the SMT solver 20 times per point, logging parameters and
# timings to results.csv.
set -u

# Hoist the hard-coded solver path so it is stated once.
solver=/Users/Desktop/optimathsat-1.5.1-macos-64-bit/bin/optimathsat

for tracelength in 10 20 50 100; do
  step=0.1
  short=0
  long=1
  for firstloop in {1..10}; do
    ratio=0
    for secondloop in {1..10}; do
      for repeat in {1..20}; do
        # Append (>>) instead of truncating (>): the original overwrote
        # results.csv on every iteration, keeping only the final run.
        echo "$tracelength $short $long $ratio" >> results.csv
        python3 main.py "$tracelength" "$short" "$long" "$ratio" > file.smt2
        /usr/bin/time "$solver" < file.smt2 >> results.csv
      done
      # bc handles the fractional arithmetic bash cannot do natively.
      ratio=$(echo "scale=10; $ratio + $step" | bc)
    done
    short=$(echo "scale=10; $short + $step" | bc)
    long=$(echo "scale=10; $long - $step" | bc)
  done
done
I want to parallelize the inside loop (repeat).
I have installed GNU parallel and I know some of the basics but because the loop has more than one command I have no idea how I can parallelize them.
I transferred the content of the loop to another script, which I guess is the way to go, but my 3 commands need to take the variables (tracelength, ratio, short, long) and run according to them. Any idea how to pass the parameters from a script to a subscript? Or do you maybe have a better idea for parallelization?
I am editing the question because I used the answer below, but now my execution time is always 0.00 regardless of how big file.smt2 is.
this is the new version of code:
#!/bin/bash
# Parallel sweep: GNU parallel launches one doone job per combination of
# (tracelength, short, long, ratio) x repeat, collecting all output in
# results.csv.

# Run one solver invocation for the given parameter tuple.
# Arguments: $1 tracelength, $2 short, $3 long, $4 ratio
# Outputs:   solver user time (via gtime "%U") on stdout
doone() {
  local tracelength="$1" short="$2" long="$3" ratio="$4"
  # Each job must use its OWN input file. The original appended (>>) to a
  # single shared file.smt2 from every concurrent job, so jobs raced and
  # each solver read an interleaved, ever-growing file - which is why the
  # measured times were meaningless.
  local smt
  smt=$(mktemp) || return 1
  #echo "$tracelength $short $long $ratio" >> results.csv
  python3 main.py "$tracelength" "$short" "$long" "$ratio" > "$smt"
  gtime -f "%U" /Users/Desktop/optimathsat-1.5.1-macos-64-bit/bin/optimathsat < "$smt"
  rm -f -- "$smt"
}
export -f doone

step=0.2
# :::: reads argument values from a file (here a process substitution);
# ::::+ pairs its values with the previous source instead of crossing them,
# so short and long step in lockstep.
parallel doone \
  ::: 200 300 \
  :::: <(seq 0 "$step" 0.5) \
  ::::+ <(seq 1 -"$step" 0.5) \
  :::: <(seq 0 "$step" 0.5) \
  ::: {1..5} &> results.csv
In your original code you overwrite results.csv again and again. I assume that is a mistake and that you instead want everything combined into one big CSV file. If you want a separate CSV file per run, see below.
If you want a CSV file containing the arguments and the run time, use: