If you have more jobs than can safely be run in parallel at the same time, you can use parset from GNU Parallel:
i="$(parallel --files ::: "${cmd[@]}" | perl -pe '$_="\"\`cat $_;rm $_\`\" "')"
eval my_result_array=($i)
unset i
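For example, with cmd holding the commands to run (the same array used in the examples further below), the snippet fills my_result_array with one element per command. A minimal sketch:
cmd=("echo '<<joe \"double space\" cartoon>>'" "pwd")
i="$(parallel --files ::: "${cmd[@]}" | perl -pe '$_="\"\`cat $_;rm $_\`\" "')"
eval my_result_array=($i)
unset i
echo "${my_result_array[0]}"   # output of the first command
echo "${my_result_array[1]}"   # output of pwd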
Or in general:
parset() {
  local vname
  vname="$1"
  shift
  if [[ "$(declare -p $vname 2>/dev/null)" =~ "declare -a" ]]; then
    # vname refers to an array
    # The array elements refer to variable names to put output into
    eval $(
      parallel --files "$@" |
        perl -pe 'chop;$_="\"\`cat $_; rm $_\`\"\n"' |
        parallel echo {2}={1} :::: - :::+ $(eval echo '${'$vname'[@]}')
    )
  else
    # vname is not an existing array: put output into that variable
    eval $vname="( $( parallel --files "$@" |
      perl -pe 'chop;$_="\"\`cat $_; rm $_\`\" "' ) )"
  fi
}
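The trick is parallel --files, which stores each job's output in a temporary file and prints the file name; the perl one-liner rewrites every file name into a fragment that cats and removes the file, so the final eval substitutes each job's output into the target variable and cleans up the temp file. To see the assignments that get eval'ed, you can run the pipeline on its own. A sketch; the file names shown are hypothetical, and the temp files are only removed once the assignments are actually eval'ed:
into_vars=(seq pwd ls)
parallel --files ::: "seq 10" pwd ls |
  perl -pe 'chop;$_="\"\`cat $_; rm $_\`\"\n"' |
  parallel echo {2}={1} :::: - :::+ "${into_vars[@]}"
# Prints something like:
#   seq="`cat /tmp/par1234.par; rm /tmp/par1234.par`"
#   pwd="`cat /tmp/par5678.par; rm /tmp/par5678.par`"
#   ls="`cat /tmp/par9abc.par; rm /tmp/par9abc.par`"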
# Put output into vars $seq, $pwd, $ls
into_vars=(seq pwd ls)
parset into_vars ::: "seq 10" pwd ls
echo "$ls"
# Put output into aaa
unset aaa
parset aaa seq 3 ::: 4 5 6
echo "${aaa[1]}"
# Output can contain spaces
parset out ::: "echo '<<joe \"double space\" cartoon>>'" "pwd"
echo "${out[0]}"
echo "${out[1]}"
# The commands to run can be an array
cmd=("echo '<<joe \"double space\" cartoon>>'" "pwd")
parset data ::: "${cmd[@]}"
echo "${data[0]}"
echo "${data[1]}"
# You cannot pipe into parset, but must use a tempfile
seq 10 > parallel_input
parset res echo :::: parallel_input
echo "${res[0]}"
echo "${res[9]}"
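A process substitution should also work in place of the temp file, since unlike a pipe it does not run parset in a subshell. This is a sketch and an assumption, not taken from the parset documentation:
parset res2 echo :::: <(seq 10)
echo "${res2[0]}"
echo "${res2[9]}"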
See details: https://www.gnu.org/software/parallel/parset.html