There are many ways to solve this, of course. Another possible solution might look something like this:
Leave it on the primary server and use cpulimit to constrain the CPU usage of ffmpeg to, say, 30% — this addresses the throttling problem:
cpulimit -l 30 -- ffmpeg -i input.mp4 -c:v libx264 -preset veryfast output.mp4
Then use the simplest of queueing mechanisms — moving files between directories — a long-proven and reliable way to implement a processing queue:
The worker script:
#!/usr/bin/env bash
# Directory-based transcode queue worker.
#
# Files dropped into $INCOMING are claimed one at a time (moved into
# $PROCESSING), transcoded by ffmpeg under a cpulimit CPU cap, and the
# result is published into $DONE. Failed inputs are parked in $FAILED.
# The move-between-directories scheme doubles as the queue and as the
# crash-recovery mechanism: anything still in $PROCESSING at startup is
# pushed back into $INCOMING.
set -u

INCOMING="./incoming"
PROCESSING="./processing"
DONE="./done"
FAILED="./failed"
CPU_LIMIT=30        # percent of one core granted to ffmpeg via cpulimit
SLEEP_SECONDS=2     # poll interval when the queue is empty

mkdir -p "$INCOMING" "$PROCESSING" "$DONE" "$FAILED"

# Recover after a crash: re-queue inputs left in processing, and discard
# stale partial encodes (they are regenerated from the re-queued input).
for path in "$PROCESSING"/*; do
  [ -e "$path" ] || continue
  case "$path" in
    *.partial.mp4) rm -f -- "$path" ;;                    # stale partial encode
    *)             mv -- "$path" "$INCOMING/$(basename "$path")" ;;
  esac
done

while true; do
  FILE=""
  # Pick the first regular file in the incoming directory.
  for path in "$INCOMING"/*; do
    [ -f "$path" ] || continue
    FILE="$path"
    break
  done

  # Nothing queued: wait and poll again.
  if [ -z "$FILE" ]; then
    sleep "$SLEEP_SECONDS"
    continue
  fi

  BASENAME="$(basename "$FILE")"
  STEM="${BASENAME%.*}"
  WORK="$PROCESSING/$BASENAME"
  # Encode into $PROCESSING first and publish to $DONE with an atomic
  # rename on success, so a crash or failed encode can never leave a
  # truncated file in $DONE looking like a finished job.
  TMP_OUTPUT="$PROCESSING/$STEM.partial.mp4"
  OUTPUT="$DONE/$STEM.mp4"

  # Claim the file by moving it out of incoming; if the move fails
  # (e.g. the file vanished), just go around again.
  mv -- "$FILE" "$WORK" || { sleep "$SLEEP_SECONDS"; continue; }
  echo "Processing $WORK"

  # Run ffmpeg under the CPU cap (blocking). -threads 1 keeps ffmpeg to a
  # single worker thread so the percentage cap is meaningful.
  # NOTE(review): cpulimit is assumed to forward ffmpeg's exit status;
  # verify with the installed cpulimit version.
  cpulimit -l "$CPU_LIMIT" -- ffmpeg -y -threads 1 -i "$WORK" \
    -c:v libx264 -preset veryfast -c:a aac "$TMP_OUTPUT"
  STATUS=$?

  # Require both a zero exit status and a non-empty output file before
  # declaring success.
  if [ "$STATUS" -eq 0 ] && [ -s "$TMP_OUTPUT" ]; then
    mv -- "$TMP_OUTPUT" "$OUTPUT"
    rm -f -- "$WORK"
    echo "Finished $OUTPUT"
  else
    rm -f -- "$TMP_OUTPUT"                  # drop any partial encode
    mv -- "$WORK" "$FAILED/$BASENAME"
    echo "Failed $FAILED/$BASENAME" >&2     # diagnostics to stderr
  fi
done