#!/bin/bash

#
# Args:
#  $1 configfile name
#  $2 reportable/non-reportable
#  $3 ignore errors or not
#  $4 rate/speed
#  $5 number of users (copies, for rate runs)
#  $6 number of repetitions
#  $7 base/peak
#  $8 all/specific cases
#  $9 install source for tor or gcc

# Load shared autobench settings/helpers: prefer the system-wide config,
# fall back to a local "functions" file in the current directory.
. /etc/autobench.conf || . functions

# Print usage information and exit with status 1.
# Uses printf so that the \t escapes become real tabs — plain bash "echo"
# (without -e) would print them literally.
function print_usage(){
	printf '%s\n' " $0 <config_name> <reportability> <error> <rate/speed> <#users> <#rep> <base/peak> <test cases>"
	printf '\t%s\n' \
		"config_name is the name of the config file" \
		"reportability is for reportable or non-reportable option" \
		"error is to specify whether we want to ignore errors or not" \
		"rate/speed is for the 2 types of speccpu runs" \
		"#users is for rate runs, how many copies." \
		"#rep needs to be 3+ and odd number for it to be valid runs" \
		"base/peak is for publish type" \
		"test_case specifies all or int or fp or specify cases"
	exit 1
}

# Process the arguments
if [ $# -lt 9 ] ; then
	print_usage
fi

CONFIGNAME=$1

# $2: 0 = non-reportable run, anything else = reportable (runspec default).
if [ "$2" -eq 0 ] ; then
	REPORTABILITY=" --noreportable "
else
	REPORTABILITY=""
fi

# $3: 1 = keep going when a benchmark errors (-I), otherwise stop on error.
if [ "$3" -eq 1 ] ; then
	ERROR=" -I "
else
	ERROR=""
fi

# $4: 1 = rate run (-r, multiple copies), otherwise speed run.
if [ "$4" -eq 1 ] ; then
	RUNTYPE=" -r "
else
	RUNTYPE=""
fi

# Number of copies for rate runs.  Named NUSERS (not USER) so we do not
# clobber the standard USER environment variable for sourced scripts.
NUSERS=$5
REP=$6

PUBLISHTYPE=$7
TESTCASE=$8

COMMAND="runspec -c $CONFIGNAME $REPORTABILITY $ERROR $RUNTYPE -u $NUSERS -n $REP -T $PUBLISHTYPE $TESTCASE"

# Wipe any previous speccpu sources before reinstalling.  ${AUTODIR:?}
# aborts here instead of running "rm -Rf /sources/speccpu/" if AUTODIR
# was somehow left unset by the sourced config.
rm -Rf -- "${AUTODIR:?}/sources/speccpu/"

"$AUTODIR/scripts/benchmarks/install/speccpu" "$9" || exit 2

# Prepare the benchmark #########################################

#getcommand doprofilers
#doprofilers install

# Bail out if the sources are missing; continuing would make the
# "rm -Rf result" below run in whatever directory we happen to be in.
pushd "$AUTODIR/sources/speccpu" > /dev/null || exit 3

# Symlink the SPEC result directory into the upload/log area so the
# benchmark output lands in $LOGDIR/benchmark.
rm -Rf result
ln -s "$LOGDIR/benchmark" result

# Set up the SPEC environment (PATH etc.) in the current shell.
. "$AUTODIR/sources/speccpu/shrc"

# Run the benchmark ############################################
startprofilers
echo "$COMMAND"
# COMMAND is a flat string built above; the unquoted expansion is
# intentional so it word-splits back into runspec and its arguments.
$COMMAND

stopprofilers

doprofilers report
doprofilers postprocess

# Remove the result symlink again so the next run starts clean.
rm -f result
popd

