while read file;do                                      # 1
Server=$(echo $file | awk '{ print $1 }')               # 2
FDate=$(echo $file | awk '{ print $2 }')                # 3
ST=$(cat foobar | grep $Server | awk '{ print $3 }')    # 4
#ST=$(grep $Server foobar | awk '{ print $3 }')         # 5

grep needs at least the pattern to search for (or an -e or -f option providing the equivalent), so if $Server ends up being empty, then the unquoted $Server on lines 4 and 5 disappears during word splitting (see also When is double-quoting necessary?), and
- the `grep` on line 4 gets no arguments. Without the mandatory argument, it prints the usage description.
- the `grep` on line 5 gets the single argument `foobar`, which it takes as a pattern. By default it reads from standard input, and inside the loop, it has the same stdin as the loop, so it eats everything from there.
Now, the whole loop reminds me of this question: Why is using a shell loop to process text considered bad practice? and it could be simplified at least somewhat. read can split the input on fields itself, so we can remove the command substitutions.
Then, we should probably deal with the case where one or both of the values happen to be empty. And, since awk can do the job of grep, too, let's do that:
while read server fdate; do
    if [ -z "$server" ] || [ -z "$fdate" ]; then
        continue
    fi
    ST=$(awk < foobar -v server="$server" '$0 ~ server { print $3 }')
    echo "server $server fdate $fdate ST $ST"
done < inputfile

(or, depending on what you're intending to do in the end, replace the whole thing with an awk program.)