From 86455757ec088cecd177a38a0562766acc2d92e9 Mon Sep 17 00:00:00 2001 From: Dmitry Date: Mon, 10 Sep 2018 16:31:44 +0300 Subject: [PATCH 1/6] Series runs --- nancy_run.sh | 957 +++++++++++++++++++++++++++++++++++++-------------- 1 file changed, 708 insertions(+), 249 deletions(-) diff --git a/nancy_run.sh b/nancy_run.sh index 64d4a0a..cfcb4dc 100755 --- a/nancy_run.sh +++ b/nancy_run.sh @@ -17,6 +17,20 @@ VERBOSE_OUTPUT_REDIRECT=" > /dev/null" EBS_SIZE_MULTIPLIER=15 POSTGRES_VERSION_DEFAULT=10 AWS_BLOCK_DURATION=0 +declare -a RUNS # i - delta_config i+1 delta_ddl_do i+2 delta_ddl_undo + +function _attach_pancake_drive() { + docker-machine ssh $DOCKER_MACHINE "sudo sh -c \"mkdir /home/basedump\"" + INSTANCE_ID=$(docker-machine ssh $DOCKER_MACHINE curl -s http://169.254.169.254/latest/meta-data/instance-id) + attachResult=$(aws --region=$AWS_REGION ec2 attach-volume --device /dev/xvdc --volume-id $BACKUP_VOLUME_ID --instance-id $INSTANCE_ID) + sleep 10 + docker-machine ssh $DOCKER_MACHINE sudo mount /dev/xvdc /home/basedump + docker-machine ssh $DOCKER_MACHINE "sudo df -h /dev/xvdc" +} + +function _dettach_pancake_drive() { + dettachResult=$(aws --region=$AWS_REGION ec2 detach-volume --volume-id $BACKUP_VOLUME_ID) +} ####################################### # Print a help @@ -366,6 +380,53 @@ function check_path() { fi } +####################################### +# Parse simple YAML file +# Globals: +# None +# Arguments: +# (text) path to yaml file +# Returns: +# None +####################################### +function parse_yaml() { + local yaml_file=$1 + local prefix=$2 + local s + local w + local fs + + s='[[:space:]]*' + w='[a-zA-Z0-9_.-]*' + fs="$(echo @|tr @ '\034')" + + ( + sed -ne '/^--/s|--||g; s|\"|\\\"|g; s/\s*$//g;' \ + -e "/#.*[\"\']/!s| #.*||g; /^#/s|#.*||g;" \ + -e "s|^\($s\)\($w\)$s:$s\"\(.*\)\"$s\$|\1$fs\2$fs\3|p" \ + -e "s|^\($s\)\($w\)$s[:-]$s\(.*\)$s\$|\1$fs\2$fs\3|p" | + awk -F"$fs" '{ + indent = length($1)/2; + if (length($2) == 0) { conj[indent]="+";} else {conj[indent]="";} + vname[indent] = $2; + for (i in vname) {if (i > indent) {delete vname[i]}} + if (length($3) > 0) { + vn=""; for (i=0; i $TMP_PATH/target_config_tmp_$i.sql + RUNS[$j]="$TMP_PATH/target_config_tmp_$i.sql" + fi + fi + if [[ ! -z "$delta_ddl_do" ]]; then + check_path delta_ddl_do + if [[ "$?" -ne "0" ]]; then + echo "$delta_ddl_do" > $TMP_PATH/target_ddl_do_tmp_$i.sql + RUNS[$d]="$TMP_PATH/target_ddl_do_tmp_$i.sql" + fi + fi + if [[ ! -z "$delta_ddl_undo" ]]; then + check_path delta_ddl_undo + if [[ "$?" -ne "0" ]]; then + echo "$delta_ddl_undo" > $TMP_PATH/target_ddl_undo_tmp_$i.sql + RUNS[$u]="$TMP_PATH/target_ddl_undo_tmp_$i.sql" + fi + fi + let i=$i+1 + [[ "$i" -eq "$runs_count" ]] && break; + done + else + if ( \ + ([[ -z ${DELTA_SQL_UNDO+x} ]] && [[ ! -z ${DELTA_SQL_DO+x} ]]) \ + || ([[ -z ${DELTA_SQL_DO+x} ]] && [[ ! -z ${DELTA_SQL_UNDO+x} ]]) + ); then + err "ERROR: if '--delta-sql-do' is specified, '--delta-sql-undo' must be also specified, and vice versa." + exit 1; + fi + if [[ ! -z ${DELTA_SQL_DO+x} ]]; then + check_path DELTA_SQL_DO + if [[ "$?" -ne "0" ]]; then + echo "$DELTA_SQL_DO" > $TMP_PATH/target_ddl_do_tmp.sql + DELTA_SQL_DO="$TMP_PATH/target_ddl_do_tmp.sql" + fi + fi + + if [[ ! -z ${DELTA_SQL_UNDO+x} ]]; then + check_path DELTA_SQL_UNDO + if [[ "$?" -ne "0" ]]; then + echo "$DELTA_SQL_UNDO" > $TMP_PATH/target_ddl_undo_tmp.sql + DELTA_SQL_UNDO="$TMP_PATH/target_ddl_undo_tmp.sql" + fi + fi + + if [[ ! -z ${DELTA_CONFIG+x} ]]; then + check_path DELTA_CONFIG + if [[ "$?" 
-ne "0" ]]; then + echo "$DELTA_CONFIG" > $TMP_PATH/target_config_tmp.conf + DELTA_CONFIG="$TMP_PATH/target_config_tmp.conf" + fi + fi + RUNS[0]=DELTA_CONFIG + RUNS[1]=DELTA_SQL_DO + RUNS[2]=DELTA_SQL_UNDO fi if [[ -z ${ARTIFACTS_DESTINATION+x} ]]; then @@ -601,30 +767,6 @@ function check_cli_parameters() { SQL_BEFORE_DB_RESTORE="$TMP_PATH/before_db_init_code_tmp.sql" fi fi - - if [[ ! -z ${DELTA_SQL_DO+x} ]]; then - check_path DELTA_SQL_DO - if [[ "$?" -ne "0" ]]; then - echo "$DELTA_SQL_DO" > $TMP_PATH/target_ddl_do_tmp.sql - DELTA_SQL_DO="$TMP_PATH/target_ddl_do_tmp.sql" - fi - fi - - if [[ ! -z ${DELTA_SQL_UNDO+x} ]]; then - check_path DELTA_SQL_UNDO - if [[ "$?" -ne "0" ]]; then - echo "$DELTA_SQL_UNDO" > $TMP_PATH/target_ddl_undo_tmp.sql - DELTA_SQL_UNDO="$TMP_PATH/target_ddl_undo_tmp.sql" - fi - fi - - if [[ ! -z ${DELTA_CONFIG+x} ]]; then - check_path DELTA_CONFIG - if [[ "$?" -ne "0" ]]; then - echo "$DELTA_CONFIG" > $TMP_PATH/target_config_tmp.conf - DELTA_CONFIG="$TMP_PATH/target_config_tmp.conf" - fi - fi ### End of CLI parameters checks ### } @@ -896,7 +1038,7 @@ function use_ec2_ebs_drive() { # None ####################################### function cleanup_and_exit { - if [ "$KEEP_ALIVE" -gt "0" ]; then + if [[ "$KEEP_ALIVE" -gt "0" ]]; then msg "Debug timeout is $KEEP_ALIVE seconds – started." msg " To connect docker machine use:" msg " docker-machine ssh $DOCKER_MACHINE" @@ -914,7 +1056,7 @@ function cleanup_and_exit { docker container rm -f $CONTAINER_HASH elif [[ "$RUN_ON" == "aws" ]]; then destroy_docker_machine $DOCKER_MACHINE - if [ ! -z ${VOLUME_ID+x} ]; then + if [[ ! -z ${VOLUME_ID+x} ]]; then msg "Wait and delete volume $VOLUME_ID" sleep 60 # wait for the machine to be removed delvolout=$(aws ec2 delete-volume --volume-id $VOLUME_ID) @@ -930,7 +1072,7 @@ function cleanup_and_exit { # # # # # MAIN # # # # # ####################################### # Process CLI options -while [ $# -gt 0 ]; do +while [[ $# -gt 0 ]]; do case "$1" in help ) help; @@ -998,13 +1140,17 @@ while [ $# -gt 0 ]; do --aws-ssh-key-path ) AWS_SSH_KEY_PATH="$2"; shift 2 ;; --aws-ebs-volume-size ) - AWS_EBS_VOLUME_SIZE="$2"; shift 2 ;; + AWS_EBS_VOLUME_SIZE="$2"; shift 2 ;; --aws-region ) - AWS_REGION="$2"; shift 2 ;; + AWS_REGION="$2"; shift 2 ;; --aws-zone ) - AWS_ZONE="$2"; shift 2 ;; + AWS_ZONE="$2"; shift 2 ;; --aws-block-duration ) - AWS_BLOCK_DURATION=$2; shift 2 ;; + AWS_BLOCK_DURATION=$2; shift 2 ;; + --runs-config ) + RUNS_CONFIG=$2; shift 2;; + --backup-volume-id ) + BACKUP_VOLUME_ID=$2; shift 2;; --s3cfg-path ) S3_CFG_PATH="$2"; shift 2 ;; @@ -1083,6 +1229,10 @@ elif [[ "$RUN_ON" == "aws" ]]; then msg " To connect docker machine use:" msg " docker-machine ssh $DOCKER_MACHINE" + if [[ ! -z ${BACKUP_VOLUME_ID+x} ]]; then + _attach_pancake_drive; + fi + docker-machine ssh $DOCKER_MACHINE "sudo sh -c \"mkdir /home/storage\"" if [[ "${AWS_EC2_TYPE:0:2}" == "i3" ]]; then msg "Using high-speed NVMe SSD disks" @@ -1090,7 +1240,7 @@ elif [[ "$RUN_ON" == "aws" ]]; then else msg "Use EBS volume" # Create new volume and attach them for non i3 instances if needed - if [ ! -z ${AWS_EBS_VOLUME_SIZE+x} ]; then + if [[ ! 
-z ${AWS_EBS_VOLUME_SIZE+x} ]]; then use_ec2_ebs_drive $AWS_EBS_VOLUME_SIZE; fi fi @@ -1114,21 +1264,7 @@ fi MACHINE_HOME="/machine_home/nancy_${CONTAINER_HASH}" alias docker_exec='docker $DOCKER_CONFIG exec -i ${CONTAINER_HASH} ' - -docker_exec bash -c "mkdir $MACHINE_HOME && chmod a+w $MACHINE_HOME" -if [[ "$RUN_ON" == "aws" ]]; then - docker-machine ssh $DOCKER_MACHINE "sudo chmod a+w /home/storage" - MACHINE_HOME="$MACHINE_HOME/storage" - docker_exec bash -c "ln -s /storage/ $MACHINE_HOME" - - msg "Move posgresql to a separate volume" - docker_exec bash -c "sudo /etc/init.d/postgresql stop" - sleep 2 # wait for postgres stopped - docker_exec bash -c "sudo mv /var/lib/postgresql /storage/" - docker_exec bash -c "ln -s /storage/postgresql /var/lib/postgresql" - docker_exec bash -c "sudo /etc/init.d/postgresql start" - sleep 2 # wait for postgres started -fi +CPU_CNT=$(docker_exec bash -c "cat /proc/cpuinfo | grep processor | wc -l") # for execute in docker ####################################### # Copy file to container @@ -1158,217 +1294,540 @@ function copy_file() { fi } -[ ! -z ${S3_CFG_PATH+x} ] && copy_file $S3_CFG_PATH \ - && docker_exec cp $MACHINE_HOME/.s3cfg /root/.s3cfg -[ ! -z ${DB_DUMP+x} ] && copy_file $DB_DUMP -[ ! -z ${PG_CONFIG+x} ] && copy_file $PG_CONFIG -[ ! -z ${DELTA_CONFIG+x} ] && copy_file $DELTA_CONFIG -[ ! -z ${DELTA_SQL_DO+x} ] && copy_file $DELTA_SQL_DO -[ ! -z ${DELTA_SQL_UNDO+x} ] && copy_file $DELTA_SQL_UNDO -[ ! -z ${WORKLOAD_CUSTOM_SQL+x} ] && copy_file $WORKLOAD_CUSTOM_SQL -[ ! -z ${WORKLOAD_REAL+x} ] && copy_file $WORKLOAD_REAL +####################################### +# Execute shell commands in container after it was started +# Globals: +# COMMANDS_AFTER_CONTAINER_INIT, MACHINE_HOME,docker_exec alias +# Arguments: +# None +# Returns: +# None +####################################### +function apply_commands_after_container_init() { + OP_START_TIME=$(date +%s); + if ([[ ! -z ${COMMANDS_AFTER_CONTAINER_INIT+x} ]] && [[ "$COMMANDS_AFTER_CONTAINER_INIT" != "" ]]) + then + msg "Apply code after docker init" + COMMANDS_AFTER_CONTAINER_INIT_FILENAME=$(basename $COMMANDS_AFTER_CONTAINER_INIT) + copy_file $COMMANDS_AFTER_CONTAINER_INIT + # --set ON_ERROR_STOP=on + docker_exec bash -c "chmod +x $MACHINE_HOME/$COMMANDS_AFTER_CONTAINER_INIT_FILENAME" + docker_exec sh $MACHINE_HOME/$COMMANDS_AFTER_CONTAINER_INIT_FILENAME + END_TIME=$(date +%s); + DURATION=$(echo $((END_TIME-OP_START_TIME)) | awk '{printf "%d:%02d:%02d", $1/3600, ($1/60)%60, $1%60}') + msg "After docker init code has been applied for $DURATION." + fi +} -## Apply machine features -# Dump -sleep 2 # wait for postgres up&running - -OP_START_TIME=$(date +%s); -if ([ ! -z ${COMMANDS_AFTER_CONTAINER_INIT+x} ] && [ "$COMMANDS_AFTER_CONTAINER_INIT" != "" ]) -then - msg "Apply code after docker init" - COMMANDS_AFTER_CONTAINER_INIT_FILENAME=$(basename $COMMANDS_AFTER_CONTAINER_INIT) - copy_file $COMMANDS_AFTER_CONTAINER_INIT - # --set ON_ERROR_STOP=on - docker_exec bash -c "chmod +x $MACHINE_HOME/$COMMANDS_AFTER_CONTAINER_INIT_FILENAME" - docker_exec sh $MACHINE_HOME/$COMMANDS_AFTER_CONTAINER_INIT_FILENAME +####################################### +# Execute sql code before restore database +# Globals: +# SQL_BEFORE_DB_RESTORE, MACHINE_HOME, docker_exec alias +# Arguments: +# None +# Returns: +# None +####################################### +function apply_sql_before_db_restore() { + OP_START_TIME=$(date +%s); + if ([[ ! 
-z ${SQL_BEFORE_DB_RESTORE+x} ]] && [[ "$SQL_BEFORE_DB_RESTORE" != "" ]]); then + msg "Apply sql code before db init" + SQL_BEFORE_DB_RESTORE_FILENAME=$(basename $SQL_BEFORE_DB_RESTORE) + copy_file $SQL_BEFORE_DB_RESTORE + # --set ON_ERROR_STOP=on + docker_exec bash -c "psql --set ON_ERROR_STOP=on -U postgres $DB_NAME -b -f $MACHINE_HOME/$SQL_BEFORE_DB_RESTORE_FILENAME $VERBOSE_OUTPUT_REDIRECT" + END_TIME=$(date +%s); + DURATION=$(echo $((END_TIME-OP_START_TIME)) | awk '{printf "%d:%02d:%02d", $1/3600, ($1/60)%60, $1%60}') + msg "Before init SQL code applied for $DURATION." + fi +} + +####################################### +# Restore database dump +# Globals: +# DB_DUMP_EXT, DB_DUMP_FILENAME, DB_NAME, MACHINE_HOME, VERBOSE_OUTPUT_REDIRECT +# Arguments: +# None +# Returns: +# None +####################################### +function restore_dump() { + OP_START_TIME=$(date +%s); + msg "Restore database dump" + case "$DB_DUMP_EXT" in + sql) + docker_exec bash -c "cat $MACHINE_HOME/$DB_DUMP_FILENAME | psql --set ON_ERROR_STOP=on -U postgres $DB_NAME $VERBOSE_OUTPUT_REDIRECT" + ;; + bz2) + docker_exec bash -c "bzcat $MACHINE_HOME/$DB_DUMP_FILENAME | psql --set ON_ERROR_STOP=on -U postgres $DB_NAME $VERBOSE_OUTPUT_REDIRECT" + ;; + gz) + docker_exec bash -c "zcat $MACHINE_HOME/$DB_DUMP_FILENAME | psql --set ON_ERROR_STOP=on -U postgres $DB_NAME $VERBOSE_OUTPUT_REDIRECT" + ;; + pgdmp) + docker_exec bash -c "pg_restore -j $CPU_CNT --no-owner --no-privileges -U postgres -d $DB_NAME $MACHINE_HOME/$DB_DUMP_FILENAME" || true + ;; + esac END_TIME=$(date +%s); DURATION=$(echo $((END_TIME-OP_START_TIME)) | awk '{printf "%d:%02d:%02d", $1/3600, ($1/60)%60, $1%60}') - msg "After docker init code has been applied for $DURATION." -fi -OP_START_TIME=$(date +%s); -if ([ ! -z ${SQL_BEFORE_DB_RESTORE+x} ] && [ "$SQL_BEFORE_DB_RESTORE" != "" ]); then - msg "Apply sql code before db init" - SQL_BEFORE_DB_RESTORE_FILENAME=$(basename $SQL_BEFORE_DB_RESTORE) - copy_file $SQL_BEFORE_DB_RESTORE - # --set ON_ERROR_STOP=on - docker_exec bash -c "psql --set ON_ERROR_STOP=on -U postgres $DB_NAME -b -f $MACHINE_HOME/$SQL_BEFORE_DB_RESTORE_FILENAME $VERBOSE_OUTPUT_REDIRECT" + msg "Database dump restored for $DURATION." +} + +####################################### +# Execute sql code after db restore +# Globals: +# SQL_AFTER_DB_RESTORE, DB_NAME, MACHINE_HOME, VERBOSE_OUTPUT_REDIRECT, docker_exec alias +# Arguments: +# None +# Returns: +# None +####################################### +function apply_sql_after_db_restore() { + # After init database sql code apply + OP_START_TIME=$(date +%s); + if ([[ ! -z ${SQL_AFTER_DB_RESTORE+x} ]] && [[ "$SQL_AFTER_DB_RESTORE" != "" ]]); then + msg "Apply sql code after db init" + SQL_AFTER_DB_RESTORE_FILENAME=$(basename $SQL_AFTER_DB_RESTORE) + copy_file $SQL_AFTER_DB_RESTORE + docker_exec bash -c "psql --set ON_ERROR_STOP=on -U postgres $DB_NAME -b -f $MACHINE_HOME/$SQL_AFTER_DB_RESTORE_FILENAME $VERBOSE_OUTPUT_REDIRECT" + END_TIME=$(date +%s); + DURATION=$(echo $((END_TIME-OP_START_TIME)) | awk '{printf "%d:%02d:%02d", $1/3600, ($1/60)%60, $1%60}') + msg "After init SQL code applied for $DURATION." + fi +} + +####################################### +# Apply DDL code +# Globals: +# DELTA_SQL_DO, DB_NAME, MACHINE_HOME, VERBOSE_OUTPUT_REDIRECT, docker_exec alias +# Arguments: +# None +# Returns: +# None +####################################### +function apply_ddl_do_code() { + local delta_ddl_do=$1 + # Apply DDL code + OP_START_TIME=$(date +%s); + if ([[ ! 
-z "$delta_ddl_do" ]] && [[ "$delta_ddl_do" != "" ]]); then + msg "Apply DDL SQL code" + delta_ddl_do_filename=$(basename $delta_ddl_do) + docker_exec bash -c "psql --set ON_ERROR_STOP=on -U postgres $DB_NAME -b -f $MACHINE_HOME/$delta_ddl_do_filename $VERBOSE_OUTPUT_REDIRECT" + END_TIME=$(date +%s); + DURATION=$(echo $((END_TIME-OP_START_TIME)) | awk '{printf "%d:%02d:%02d", $1/3600, ($1/60)%60, $1%60}') + msg "Delta SQL \"DO\" code applied for $DURATION." + fi +} + +####################################### +# Apply DDL undo code +# Globals: +# DELTA_SQL_UNDO, DB_NAME, MACHINE_HOME, VERBOSE_OUTPUT_REDIRECT, docker_exec alias +# Arguments: +# None +# Returns: +# None +####################################### +function apply_ddl_undo_code() { + local delta_ddl_undo=$1 + OP_START_TIME=$(date +%s); + if ([[ ! -z ${delta_ddl_undo+x} ]] && [[ "$delta_ddl_undo" != "" ]]); then + msg "Apply DDL undo SQL code" + delta_ddl_undo_filename=$(basename $delta_ddl_undo) + docker_exec bash -c "psql --set ON_ERROR_STOP=on -U postgres $DB_NAME -b -f $MACHINE_HOME/$delta_ddl_undo_filename $VERBOSE_OUTPUT_REDIRECT" + END_TIME=$(date +%s); + DURATION=$(echo $((END_TIME-OP_START_TIME)) | awk '{printf "%d:%02d:%02d", $1/3600, ($1/60)%60, $1%60}') + msg "Delta SQL \"UNDO\" code has been applied for $DURATION." + fi +} + +####################################### +# Apply initial postgres configuration +# Globals: +# PG_CONFIG, MACHINE_HOME, docker_exec alias +# Arguments: +# None +# Returns: +# None +####################################### +function apply_initial_postgres_configuration() { + # Apply initial postgres configuration + OP_START_TIME=$(date +%s); + if ([[ ! -z ${PG_CONFIG+x} ]] && [[ "$PG_CONFIG" != "" ]]); then + msg "Apply initial postgres configuration" + PG_CONFIG_FILENAME=$(basename $PG_CONFIG) + docker_exec bash -c "cat $MACHINE_HOME/$PG_CONFIG_FILENAME >> /etc/postgresql/$PG_VERSION/main/postgresql.conf" + docker_exec bash -c "sudo /etc/init.d/postgresql restart" + sleep 10 + END_TIME=$(date +%s); + DURATION=$(echo $((END_TIME-OP_START_TIME)) | awk '{printf "%d:%02d:%02d", $1/3600, ($1/60)%60, $1%60}') + msg "Initial configuration applied for $DURATION." + fi +} + +####################################### +# Apply test postgres configuration +# Globals: +# DELTA_CONFIG, MACHINE_HOME, docker_exec alias +# Arguments: +# None +# Returns: +# None +####################################### +function apply_delta_configuration() { + local delta_config=$1 + # Apply postgres configuration + OP_START_TIME=$(date +%s); + if ([[ ! -z "$delta_config" ]] && [[ "$delta_config" != "" ]]); then + msg "Apply postgres configuration" + delta_config_filename=$(basename $delta_config) + docker_exec bash -c "cat $MACHINE_HOME/$delta_config_filename >> /etc/postgresql/$PG_VERSION/main/postgresql.conf" + docker_exec bash -c "sudo /etc/init.d/postgresql restart" + sleep 10 + END_TIME=$(date +%s); + DURATION=$(echo $((END_TIME-OP_START_TIME)) | awk '{printf "%d:%02d:%02d", $1/3600, ($1/60)%60, $1%60}') + msg "Postgres configuration applied for $DURATION." + fi +} + +####################################### +# Prepare to start workload. 
+# Save the preparation log, run vacuumdb, and truncate the Postgres log
+# Globals:
+#   ARTIFACTS_FILENAME, MACHINE_HOME, DB_NAME, docker_exec alias
+# Arguments:
+#   None
+# Returns:
+#   None
+#######################################
+function prepare_start_workload() {
+  local run_number=$1
+  let run_number=run_number+1
+  # Save the before-workload log
+  msg "Save preparation log"
+  logpath=$( \
+    docker_exec bash -c "psql -XtU postgres \
+      -c \"select string_agg(setting, '/' order by name) from pg_settings where name in ('log_directory', 'log_filename');\" \
+      | grep / | sed -e 's/^[ \t]*//'"
+  )
+  if [[ "$run_number" -eq "1" ]]; then
+    # save preparation log only before first run
+    docker_exec bash -c "mkdir $MACHINE_HOME/$ARTIFACTS_FILENAME"
+    docker_exec bash -c "gzip -c $logpath > $MACHINE_HOME/$ARTIFACTS_FILENAME/$ARTIFACTS_FILENAME.$run_number.prepare.log.gz"
+  fi
+
+  # Clear statistics and log
+  msg "Execute vacuumdb..."
+  docker_exec vacuumdb -U postgres $DB_NAME -j $CPU_CNT --analyze
+  docker_exec bash -c "echo '' > /var/log/postgresql/postgresql-$PG_VERSION-main.log"
+}
+
+#######################################
+# Execute workload.
+# Globals:
+#   WORKLOAD_REAL, WORKLOAD_REAL_REPLAY_SPEED, WORKLOAD_CUSTOM_SQL, MACHINE_HOME,
+#   DB_NAME, VERBOSE_OUTPUT_REDIRECT, docker_exec alias
+# Arguments:
+#   None
+# Returns:
+#   None
+#######################################
+function execute_workload() {
+  # Execute workload
+  OP_START_TIME=$(date +%s);
+  msg "Execute workload..."
+  if [[ ! -z ${WORKLOAD_REAL+x} ]] && [[ "$WORKLOAD_REAL" != '' ]]; then
+    msg "Execute pgreplay queries..."
+    docker_exec psql -U postgres $DB_NAME -c 'drop role if exists testuser;'
+    docker_exec psql -U postgres $DB_NAME -c 'create role testuser superuser login;'
+    WORKLOAD_FILE_NAME=$(basename $WORKLOAD_REAL)
+    if [[ ! -z ${WORKLOAD_REAL_REPLAY_SPEED+x} ]] && [[ "$WORKLOAD_REAL_REPLAY_SPEED" != '' ]]; then
+      docker_exec bash -c "pgreplay -r -s $WORKLOAD_REAL_REPLAY_SPEED $MACHINE_HOME/$WORKLOAD_FILE_NAME"
+    else
+      docker_exec bash -c "pgreplay -r -j $MACHINE_HOME/$WORKLOAD_FILE_NAME"
+    fi
+  else
+    if ([ ! -z ${WORKLOAD_CUSTOM_SQL+x} ] && [ "$WORKLOAD_CUSTOM_SQL" != "" ]); then
+      WORKLOAD_CUSTOM_FILENAME=$(basename $WORKLOAD_CUSTOM_SQL)
+      msg "Execute custom sql queries..."
+      docker_exec bash -c "psql -U postgres $DB_NAME -E -f $MACHINE_HOME/$WORKLOAD_CUSTOM_FILENAME $VERBOSE_OUTPUT_REDIRECT"
+    fi
+  fi
+  END_TIME=$(date +%s);
+  DURATION=$(echo $((END_TIME-OP_START_TIME)) | awk '{printf "%d:%02d:%02d", $1/3600, ($1/60)%60, $1%60}')
+  msg "Workload executed for $DURATION."
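+  # pgreplay flags used above, for reference: "-r" replays a prepared
+  # replay file, "-s" is a speed factor relative to the recorded timing
+  # (e.g. 2 replays roughly twice as fast), and "-j" skips long pauses
+  # between statements; this assumes the workload log was captured with
+  # the logging settings pgreplay expects (log_min_duration_statement,
+  # log_line_prefix, connection logging).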
+} -OP_START_TIME=$(date +%s); -msg "Restore database dump" -CPU_CNT=$(docker_exec bash -c "cat /proc/cpuinfo | grep processor | wc -l") # for execute in docker -case "$DB_DUMP_EXT" in - sql) - docker_exec bash -c "cat $MACHINE_HOME/$DB_DUMP_FILENAME | psql --set ON_ERROR_STOP=on -U postgres $DB_NAME $VERBOSE_OUTPUT_REDIRECT" - ;; - bz2) - docker_exec bash -c "bzcat $MACHINE_HOME/$DB_DUMP_FILENAME | psql --set ON_ERROR_STOP=on -U postgres $DB_NAME $VERBOSE_OUTPUT_REDIRECT" - ;; - gz) - docker_exec bash -c "zcat $MACHINE_HOME/$DB_DUMP_FILENAME | psql --set ON_ERROR_STOP=on -U postgres $DB_NAME $VERBOSE_OUTPUT_REDIRECT" - ;; - pgdmp) - docker_exec bash -c "pg_restore -j $CPU_CNT --no-owner --no-privileges -U postgres -d $DB_NAME $MACHINE_HOME/$DB_DUMP_FILENAME" || true - ;; -esac -END_TIME=$(date +%s); -DURATION=$(echo $((END_TIME-OP_START_TIME)) | awk '{printf "%d:%02d:%02d", $1/3600, ($1/60)%60, $1%60}') -msg "Database dump restored for $DURATION." - -# After init database sql code apply -OP_START_TIME=$(date +%s); -if ([ ! -z ${SQL_AFTER_DB_RESTORE+x} ] && [ "$SQL_AFTER_DB_RESTORE" != "" ]); then - msg "Apply sql code after db init" - SQL_AFTER_DB_RESTORE_FILENAME=$(basename $SQL_AFTER_DB_RESTORE) - copy_file $SQL_AFTER_DB_RESTORE - docker_exec bash -c "psql --set ON_ERROR_STOP=on -U postgres $DB_NAME -b -f $MACHINE_HOME/$SQL_AFTER_DB_RESTORE_FILENAME $VERBOSE_OUTPUT_REDIRECT" +####################################### +# Collect results of workload execution and save to artifact destination +# Globals: +# CONTAINER_HASH, MACHINE_HOME, ARTIFACTS_DESTINATION, docker_exec alias +# Arguments: +# None +# Returns: +# None +####################################### +function collect_results() { + local run_number=$1 + let run_number=run_number+1 + ## Get statistics + OP_START_TIME=$(date +%s); + msg "Prepare JSON log..." + docker_exec bash -c "/root/pgbadger/pgbadger \ + -j $CPU_CNT \ + --prefix '%t [%p]: [%l-1] db=%d,user=%u (%a,%h)' /var/log/postgresql/* -f stderr \ + -o $MACHINE_HOME/$ARTIFACTS_FILENAME/$ARTIFACTS_FILENAME.$run_number.json" \ + 2> >(grep -v "install the Text::CSV_XS" >&2) + msg "Prepare HTML log..." 
+ docker_exec bash -c "/root/pgbadger/pgbadger \ + -j $CPU_CNT \ + --prefix '%t [%p]: [%l-1] db=%d,user=%u (%a,%h)' /var/log/postgresql/* -f stderr \ + -o $MACHINE_HOME/$ARTIFACTS_FILENAME/pgbadger.$run_number.html" \ + 2> >(grep -v "install the Text::CSV_XS" >&2) + + docker_exec bash -c "psql -U postgres $DB_NAME -b -c '\copy (select * from pg_stat_activity) to /$MACHINE_HOME/$ARTIFACTS_FILENAME/pg_stat_activity.$run_number.csv with csv;' $VERBOSE_OUTPUT_REDIRECT" + docker_exec bash -c "psql -U postgres $DB_NAME -b -c '\copy (select * from pg_stat_archiver) to /$MACHINE_HOME/$ARTIFACTS_FILENAME/pg_stat_archiver.$run_number.csv with csv;' $VERBOSE_OUTPUT_REDIRECT" + docker_exec bash -c "psql -U postgres $DB_NAME -b -c '\copy (select * from pg_stat_bgwriter) to /$MACHINE_HOME/$ARTIFACTS_FILENAME/pg_stat_bgwriter.$run_number.csv with csv;' $VERBOSE_OUTPUT_REDIRECT" + docker_exec bash -c "psql -U postgres $DB_NAME -b -c '\copy (select * from pg_stat_database) to /$MACHINE_HOME/$ARTIFACTS_FILENAME/pg_stat_database.$run_number.csv with csv;' $VERBOSE_OUTPUT_REDIRECT" + docker_exec bash -c "psql -U postgres $DB_NAME -b -c '\copy (select * from pg_stat_database_conflicts) to /$MACHINE_HOME/$ARTIFACTS_FILENAME/pg_stat_database_conflicts.$run_number.csv with csv;' $VERBOSE_OUTPUT_REDIRECT" + docker_exec bash -c "psql -U postgres $DB_NAME -b -c '\copy (select * from pg_stat_all_tables) to /$MACHINE_HOME/$ARTIFACTS_FILENAME/pg_stat_all_tables.$run_number.csv with csv;' $VERBOSE_OUTPUT_REDIRECT" + docker_exec bash -c "psql -U postgres $DB_NAME -b -c '\copy (select * from pg_stat_sys_tables) to /$MACHINE_HOME/$ARTIFACTS_FILENAME/pg_stat_sys_tables.$run_number.csv with csv;' $VERBOSE_OUTPUT_REDIRECT" + docker_exec bash -c "psql -U postgres $DB_NAME -b -c '\copy (select * from pg_stat_user_tables) to /$MACHINE_HOME/$ARTIFACTS_FILENAME/pg_stat_user_tables.$run_number.csv with csv;' $VERBOSE_OUTPUT_REDIRECT" + docker_exec bash -c "psql -U postgres $DB_NAME -b -c '\copy (select * from pg_stat_xact_all_tables) to /$MACHINE_HOME/$ARTIFACTS_FILENAME/pg_stat_xact_all_tables.$run_number.csv with csv;' $VERBOSE_OUTPUT_REDIRECT" + docker_exec bash -c "psql -U postgres $DB_NAME -b -c '\copy (select * from pg_stat_xact_sys_tables) to /$MACHINE_HOME/$ARTIFACTS_FILENAME/pg_stat_xact_sys_tables.$run_number.csv with csv;' $VERBOSE_OUTPUT_REDIRECT" + docker_exec bash -c "psql -U postgres $DB_NAME -b -c '\copy (select * from pg_stat_xact_user_tables) to /$MACHINE_HOME/$ARTIFACTS_FILENAME/pg_stat_xact_user_tables.$run_number.csv with csv;' $VERBOSE_OUTPUT_REDIRECT" + docker_exec bash -c "psql -U postgres $DB_NAME -b -c '\copy (select * from pg_stat_all_indexes) to /$MACHINE_HOME/$ARTIFACTS_FILENAME/pg_stat_all_indexes.$run_number.csv with csv;' $VERBOSE_OUTPUT_REDIRECT" + docker_exec bash -c "psql -U postgres $DB_NAME -b -c '\copy (select * from pg_stat_sys_indexes) to /$MACHINE_HOME/$ARTIFACTS_FILENAME/pg_stat_sys_indexes.$run_number.csv with csv;' $VERBOSE_OUTPUT_REDIRECT" + docker_exec bash -c "psql -U postgres $DB_NAME -b -c '\copy (select * from pg_stat_user_indexes) to /$MACHINE_HOME/$ARTIFACTS_FILENAME/pg_stat_user_indexes.$run_number.csv with csv;' $VERBOSE_OUTPUT_REDIRECT" + docker_exec bash -c "psql -U postgres $DB_NAME -b -c '\copy (select * from pg_statio_all_tables) to /$MACHINE_HOME/$ARTIFACTS_FILENAME/pg_statio_all_tables.$run_number.csv with csv;' $VERBOSE_OUTPUT_REDIRECT" + docker_exec bash -c "psql -U postgres $DB_NAME -b -c '\copy (select * from pg_statio_sys_tables) to 
/$MACHINE_HOME/$ARTIFACTS_FILENAME/pg_statio_sys_tables.$run_number.csv with csv;' $VERBOSE_OUTPUT_REDIRECT" + docker_exec bash -c "psql -U postgres $DB_NAME -b -c '\copy (select * from pg_statio_user_tables) to /$MACHINE_HOME/$ARTIFACTS_FILENAME/pg_statio_user_tables.$run_number.csv with csv;' $VERBOSE_OUTPUT_REDIRECT" + docker_exec bash -c "psql -U postgres $DB_NAME -b -c '\copy (select * from pg_statio_all_indexes) to /$MACHINE_HOME/$ARTIFACTS_FILENAME/pg_statio_all_indexes.$run_number.csv with csv;' $VERBOSE_OUTPUT_REDIRECT" + docker_exec bash -c "psql -U postgres $DB_NAME -b -c '\copy (select * from pg_statio_sys_indexes) to /$MACHINE_HOME/$ARTIFACTS_FILENAME/pg_statio_sys_indexes.$run_number.csv with csv;' $VERBOSE_OUTPUT_REDIRECT" + docker_exec bash -c "psql -U postgres $DB_NAME -b -c '\copy (select * from pg_statio_user_indexes) to /$MACHINE_HOME/$ARTIFACTS_FILENAME/pg_statio_user_indexes.$run_number.csv with csv;' $VERBOSE_OUTPUT_REDIRECT" + docker_exec bash -c "psql -U postgres $DB_NAME -b -c '\copy (select * from pg_statio_all_sequences) to /$MACHINE_HOME/$ARTIFACTS_FILENAME/pg_statio_all_sequences.$run_number.csv with csv;' $VERBOSE_OUTPUT_REDIRECT" + docker_exec bash -c "psql -U postgres $DB_NAME -b -c '\copy (select * from pg_statio_sys_sequences) to /$MACHINE_HOME/$ARTIFACTS_FILENAME/pg_statio_sys_sequences.$run_number.csv with csv;' $VERBOSE_OUTPUT_REDIRECT" + docker_exec bash -c "psql -U postgres $DB_NAME -b -c '\copy (select * from pg_statio_user_sequences) to /$MACHINE_HOME/$ARTIFACTS_FILENAME/pg_statio_user_sequences.$run_number.csv with csv;' $VERBOSE_OUTPUT_REDIRECT" + docker_exec bash -c "psql -U postgres $DB_NAME -b -c '\copy (select * from pg_stat_user_functions) to /$MACHINE_HOME/$ARTIFACTS_FILENAME/pg_stat_user_functions.$run_number.csv with csv;' $VERBOSE_OUTPUT_REDIRECT" + docker_exec bash -c "psql -U postgres $DB_NAME -b -c '\copy (select * from pg_stat_xact_user_functions) to /$MACHINE_HOME/$ARTIFACTS_FILENAME/pg_stat_xact_user_functions.$run_number.csv with csv;' $VERBOSE_OUTPUT_REDIRECT" + + + docker_exec bash -c "gzip -c $logpath > $MACHINE_HOME/$ARTIFACTS_FILENAME/postgresql.workload.log.gz" + docker_exec bash -c "cp /etc/postgresql/$PG_VERSION/main/postgresql.conf $MACHINE_HOME/$ARTIFACTS_FILENAME/postgresql.$run_number.conf" + msg "Save artifacts..." + if [[ $ARTIFACTS_DESTINATION =~ "s3://" ]]; then + docker_exec s3cmd --recursive put /$MACHINE_HOME/$ARTIFACTS_FILENAME $ARTIFACTS_DESTINATION/ + else + if [[ "$RUN_ON" == "localhost" ]]; then + docker cp $CONTAINER_HASH:$MACHINE_HOME/$ARTIFACTS_FILENAME $ARTIFACTS_DESTINATION/ + elif [[ "$RUN_ON" == "aws" ]]; then + mkdir $ARTIFACTS_DESTINATION/$ARTIFACTS_FILENAME + docker-machine scp $DOCKER_MACHINE:/home/storage/$ARTIFACTS_FILENAME/* $ARTIFACTS_DESTINATION/$ARTIFACTS_FILENAME/ + else + err "ASSERT: must not reach this point" + exit 1 + fi + fi END_TIME=$(date +%s); DURATION=$(echo $((END_TIME-OP_START_TIME)) | awk '{printf "%d:%02d:%02d", $1/3600, ($1/60)%60, $1%60}') - msg "After init SQL code applied for $DURATION." -fi -# Apply DDL code -OP_START_TIME=$(date +%s); -if ([ ! -z ${DELTA_SQL_DO+x} ] && [ "$DELTA_SQL_DO" != "" ]); then - msg "Apply DDL SQL code" - DELTA_SQL_DO_FILENAME=$(basename $DELTA_SQL_DO) - docker_exec bash -c "psql --set ON_ERROR_STOP=on -U postgres $DB_NAME -b -f $MACHINE_HOME/$DELTA_SQL_DO_FILENAME $VERBOSE_OUTPUT_REDIRECT" + msg "Statistics got for $DURATION." 
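+  # The pg_stat_*.N.csv files above are point-in-time snapshots of the
+  # cumulative statistics views, one set per run; a minimal way to compare
+  # two runs offline (a sketch, run on the host where the artifacts were
+  # downloaded) is:
+  #   diff <(sort pg_stat_user_tables.1.csv) <(sort pg_stat_user_tables.2.csv)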
+} + +function _cp_backup_2_storage() { + docker_exec bash -c "mkdir -p /storage/backup/pg_base" + docker_exec bash -c "mkdir -p /storage/backup/pg_base_tblspace" + + OP_START_TIME=$(date +%s); + docker_exec bash -c "cp -r -p -f /basedump/pg_base/* /storage/backup/pg_base/" END_TIME=$(date +%s); DURATION=$(echo $((END_TIME-OP_START_TIME)) | awk '{printf "%d:%02d:%02d", $1/3600, ($1/60)%60, $1%60}') - msg "Delta SQL \"DO\" code applied for $DURATION." -fi -# Apply initial postgres configuration -OP_START_TIME=$(date +%s); -if ([ ! -z ${PG_CONFIG+x} ] && [ "$PG_CONFIG" != "" ]); then - msg "Apply initial postgres configuration" - PG_CONFIG_FILENAME=$(basename $PG_CONFIG) - docker_exec bash -c "cat $MACHINE_HOME/$PG_CONFIG_FILENAME >> /etc/postgresql/$PG_VERSION/main/postgresql.conf" - if [ -z ${DELTA_CONFIG+x} ] - then - docker_exec bash -c "sudo /etc/init.d/postgresql restart" - fi + msg "pg_base copied for $DURATION to storage." + + OP_START_TIME=$(date +%s); + docker_exec bash -c "cp -r -p -f /basedump/pg_base_tblspace/* /storage/backup/pg_base_tblspace/" END_TIME=$(date +%s); DURATION=$(echo $((END_TIME-OP_START_TIME)) | awk '{printf "%d:%02d:%02d", $1/3600, ($1/60)%60, $1%60}') - msg "Initial configuration applied for $DURATION." -fi -# Apply postgres configuration -OP_START_TIME=$(date +%s); -if ([ ! -z ${DELTA_CONFIG+x} ] && [ "$DELTA_CONFIG" != "" ]); then - msg "Apply postgres configuration" - DELTA_CONFIG_FILENAME=$(basename $DELTA_CONFIG) - docker_exec bash -c "cat $MACHINE_HOME/$DELTA_CONFIG_FILENAME >> /etc/postgresql/$PG_VERSION/main/postgresql.conf" - docker_exec bash -c "sudo /etc/init.d/postgresql restart" + msg "pg_base_16418 copied for $DURATION to storage." +} + +function _cp_backup() { + # Here we think what postgress stopped + docker_exec bash -c "rm -rf /var/lib/postgresql/9.6/main/*" + + OP_START_TIME=$(date +%s); + docker_exec bash -c "rm -rf /var/lib/postgresql/$PG_VERSION/main/*" + docker_exec bash -c "cp -r -p -f /storage/backup/pg_base/* /storage/postgresql/$PG_VERSION/main/" END_TIME=$(date +%s); DURATION=$(echo $((END_TIME-OP_START_TIME)) | awk '{printf "%d:%02d:%02d", $1/3600, ($1/60)%60, $1%60}') - msg "Postgres configuration applied for $DURATION." -fi -#Save before workload log -msg "Save prepaparation log" -logpath=$( \ - docker_exec bash -c "psql -XtU postgres \ - -c \"select string_agg(setting, '/' order by name) from pg_settings where name in ('log_directory', 'log_filename');\" \ - | grep / | sed -e 's/^[ \t]*//'" -) -docker_exec bash -c "mkdir $MACHINE_HOME/$ARTIFACTS_FILENAME" -docker_exec bash -c "gzip -c $logpath > $MACHINE_HOME/$ARTIFACTS_FILENAME/$ARTIFACTS_FILENAME.prepare.log.gz" - -# TODO(ns) get prepare.log.gz -#if [[ $ARTIFACTS_DESTINATION =~ "s3://" ]]; then -# docker_exec s3cmd put /$MACHINE_HOME/$ARTIFACTS_FILENAME.prepare.log.gz $ARTIFACTS_DESTINATION/ -#else -# docker `docker-machine config $DOCKER_MACHINE` cp $CONTAINER_HASH:$MACHINE_HOME/$ARTIFACTS_FILENAME.prepare.log.gz $ARTIFACTS_DESTINATION/ -#fi - -# Clear statistics and log -msg "Execute vacuumdb..." -docker_exec vacuumdb -U postgres $DB_NAME -j $CPU_CNT --analyze -docker_exec bash -c "echo '' > /var/log/postgresql/postgresql-$PG_VERSION-main.log" - -# Execute workload -OP_START_TIME=$(date +%s); -msg "Execute workload..." -if [ ! -z ${WORKLOAD_REAL+x} ] && [ "$WORKLOAD_REAL" != '' ]; then - msg "Execute pgreplay queries..." - docker_exec psql -U postgres $DB_NAME -c 'create role testuser superuser login;' - WORKLOAD_FILE_NAME=$(basename $WORKLOAD_REAL) - if [ ! 
-z ${WORKLOAD_REAL_REPLAY_SPEED+x} ] && [ "$WORKLOAD_REAL_REPLAY_SPEED" != '' ]; then - docker_exec bash -c "pgreplay -r -s $WORKLOAD_REAL_REPLAY_SPEED $MACHINE_HOME/$WORKLOAD_FILE_NAME" - else - docker_exec bash -c "pgreplay -r -j $MACHINE_HOME/$WORKLOAD_FILE_NAME" - fi -else - if ([ ! -z ${WORKLOAD_CUSTOM_SQL+x} ] && [ "$WORKLOAD_CUSTOM_SQL" != "" ]); then - WORKLOAD_CUSTOM_FILENAME=$(basename $WORKLOAD_CUSTOM_SQL) - msg "Execute custom sql queries..." - docker_exec bash -c "psql -U postgres $DB_NAME -E -f $MACHINE_HOME/$WORKLOAD_CUSTOM_FILENAME $VERBOSE_OUTPUT_REDIRECT" - fi -fi -END_TIME=$(date +%s); -DURATION=$(echo $((END_TIME-OP_START_TIME)) | awk '{printf "%d:%02d:%02d", $1/3600, ($1/60)%60, $1%60}') -msg "Workload executed for $DURATION." - -## Get statistics -OP_START_TIME=$(date +%s); -msg "Prepare JSON log..." -docker_exec bash -c "/root/pgbadger/pgbadger \ - -j $CPU_CNT \ - --prefix '%t [%p]: [%l-1] db=%d,user=%u (%a,%h)' /var/log/postgresql/* -f stderr \ - -o $MACHINE_HOME/$ARTIFACTS_FILENAME/$ARTIFACTS_FILENAME.json" \ - 2> >(grep -v "install the Text::CSV_XS" >&2) - -docker_exec bash -c "gzip -c $logpath > $MACHINE_HOME/$ARTIFACTS_FILENAME/$ARTIFACTS_FILENAME.log.gz" -docker_exec bash -c "gzip -c /etc/postgresql/$PG_VERSION/main/postgresql.conf > $MACHINE_HOME/$ARTIFACTS_FILENAME/$ARTIFACTS_FILENAME.conf.gz" -msg "Save artifacts..." -if [[ $ARTIFACTS_DESTINATION =~ "s3://" ]]; then - docker_exec s3cmd --recursive put /$MACHINE_HOME/$ARTIFACTS_FILENAME $ARTIFACTS_DESTINATION/ - #docker_exec s3cmd put /$MACHINE_HOME/$ARTIFACTS_FILENAME.json $ARTIFACTS_DESTINATION/ - #docker_exec s3cmd put /$MACHINE_HOME/$ARTIFACTS_FILENAME.log.gz $ARTIFACTS_DESTINATION/ - #docker_exec s3cmd put /$MACHINE_HOME/$ARTIFACTS_FILENAME.conf.gz $ARTIFACTS_DESTINATION/ -else - if [[ "$RUN_ON" == "localhost" ]]; then - docker cp $CONTAINER_HASH:$MACHINE_HOME/$ARTIFACTS_FILENAME $ARTIFACTS_DESTINATION/ - #docker cp $CONTAINER_HASH:$MACHINE_HOME/$ARTIFACTS_FILENAME.json $ARTIFACTS_DESTINATION/ - #docker cp $CONTAINER_HASH:$MACHINE_HOME/$ARTIFACTS_FILENAME.log.gz $ARTIFACTS_DESTINATION/ - #docker cp $CONTAINER_HASH:$MACHINE_HOME/$ARTIFACTS_FILENAME.conf.gz $ARTIFACTS_DESTINATION/ - # TODO option: ln / cp - #cp "$TMP_PATH/nancy_$CONTAINER_HASH/"$ARTIFACTS_FILENAME.json $ARTIFACTS_DESTINATION/ - #cp "$TMP_PATH/nancy_$CONTAINER_HASH/"$ARTIFACTS_FILENAME.log.gz $ARTIFACTS_DESTINATION/ - elif [[ "$RUN_ON" == "aws" ]]; then - mkdir $ARTIFACTS_DESTINATION/$ARTIFACTS_FILENAME - docker-machine scp $DOCKER_MACHINE:/home/storage/$ARTIFACTS_FILENAME/* $ARTIFACTS_DESTINATION/$ARTIFACTS_FILENAME/ - #docker-machine scp $DOCKER_MACHINE:/home/storage/$ARTIFACTS_FILENAME.json $ARTIFACTS_DESTINATION/ - #docker-machine scp $DOCKER_MACHINE:/home/storage/$ARTIFACTS_FILENAME.log.gz $ARTIFACTS_DESTINATION/ - #docker-machine scp $DOCKER_MACHINE:/home/storage/$ARTIFACTS_FILENAME.conf.gz $ARTIFACTS_DESTINATION/ - else - err "ASSERT: must not reach this point" - exit 1 - fi -fi -END_TIME=$(date +%s); -DURATION=$(echo $((END_TIME-OP_START_TIME)) | awk '{printf "%d:%02d:%02d", $1/3600, ($1/60)%60, $1%60}') -msg "Statistics got for $DURATION." - -OP_START_TIME=$(date +%s); -if ([ ! -z ${DELTA_SQL_UNDO+x} ] && [ "$DELTA_SQL_UNDO" != "" ]); then - msg "Apply DDL undo SQL code" - DELTA_SQL_UNDO_FILENAME=$(basename $DELTA_SQL_UNDO) - docker_exec bash -c "psql --set ON_ERROR_STOP=on -U postgres $DB_NAME -b -f $MACHINE_HOME/$DELTA_SQL_UNDO_FILENAME $VERBOSE_OUTPUT_REDIRECT" + msg "pg_base main copied for $DURATION." 
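+  # The block below restores the extra tablespace: /storage/postgresql_hdd
+  # and the pg_base_tblspace backup directory reflect an assumption of this
+  # pancake-drive setup, namely that the base backup comes from a 9.6
+  # cluster with one additional tablespace (apparently OID 16418), which
+  # the container reaches through the /var/lib/postgresql_hdd symlink.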
+
+  docker_exec bash -c "mkdir /storage/postgresql_hdd" || true
+  docker_exec bash -c "ln -s /storage/postgresql_hdd/ /var/lib/postgresql_hdd" || true
+  docker_exec bash -c "rm -rf /storage/postgresql_hdd/*"
+  OP_START_TIME=$(date +%s);
+  docker_exec bash -c "cp -r -p -f /storage/backup/pg_base_tblspace/* /storage/postgresql_hdd/"
+  END_TIME=$(date +%s);
+  DURATION=$(echo $((END_TIME-OP_START_TIME)) | awk '{printf "%d:%02d:%02d", $1/3600, ($1/60)%60, $1%60}')
+  msg "pg_base_16418 copied for $DURATION."
+
+  OP_START_TIME=$(date +%s);
+  docker_exec bash -c "chown -R postgres:postgres /storage/postgresql/$PG_VERSION/main/*"
+  docker_exec bash -c "chown -R postgres:postgres /storage/postgresql/$PG_VERSION/main"
+  docker_exec bash -c "chown -R postgres:postgres /storage/postgresql_hdd/*"
+  docker_exec bash -c "chown -R postgres:postgres /storage/postgresql_hdd"
+  docker_exec bash -c "chown -R postgres:postgres /var/lib/postgresql"
+  docker_exec bash -c "chown -R postgres:postgres /var/lib/postgresql_hdd"
+
+  docker_exec bash -c "chmod 0700 /var/lib/postgresql/9.6/main/"
+  END_TIME=$(date +%s);
+  DURATION=$(echo $((END_TIME-OP_START_TIME)) | awk '{printf "%d:%02d:%02d", $1/3600, ($1/60)%60, $1%60}')
+  msg "Rights changed for $DURATION."
+
+  docker_exec bash -c "localedef -f UTF-8 -i en_US en_US.UTF-8"
+}
+
+function _rsync(){
+  docker_exec bash -c "sudo /etc/init.d/postgresql stop"
+  sleep 10
+  OP_START_TIME=$(date +%s);
+  docker_exec bash -c "rsync -av /storage/backup/pg_base/ /storage/postgresql/9.6/main"
+  END_TIME=$(date +%s);
+  DURATION=$(echo $((END_TIME-OP_START_TIME)) | awk '{printf "%d:%02d:%02d", $1/3600, ($1/60)%60, $1%60}')
+  msg "pg_base main rsync done for $DURATION."
+  OP_START_TIME=$(date +%s);
+  docker_exec bash -c "rsync -av /storage/backup/pg_base_tblspace/ /storage/postgresql_hdd/"
+  END_TIME=$(date +%s);
+  DURATION=$(echo $((END_TIME-OP_START_TIME)) | awk '{printf "%d:%02d:%02d", $1/3600, ($1/60)%60, $1%60}')
+  msg "pg_base_tblspace rsync done for $DURATION."
+
+  docker_exec bash -c "chown -R postgres:postgres /storage/postgresql/$PG_VERSION/main/*"
+  docker_exec bash -c "chown -R postgres:postgres /storage/postgresql/$PG_VERSION/main"
+  docker_exec bash -c "chown -R postgres:postgres /storage/postgresql_hdd/*"
+  docker_exec bash -c "chown -R postgres:postgres /storage/postgresql_hdd"
+  docker_exec bash -c "chown -R postgres:postgres /var/lib/postgresql"
+  docker_exec bash -c "chown -R postgres:postgres /var/lib/postgresql_hdd"
+  docker_exec bash -c "chmod 0700 /var/lib/postgresql/9.6/main/"
+
+  docker_exec bash -c "sudo /etc/init.d/postgresql start"
+  sleep 10
+
+  docker_exec bash -c "psql --set ON_ERROR_STOP=on -U postgres -c 'drop database if exists test;'"
+  docker_exec bash -c "psql --set ON_ERROR_STOP=on -U postgres -c 'alter database postila_ru rename to test;'"
+}
+
+docker_exec bash -c "mkdir $MACHINE_HOME && chmod a+w $MACHINE_HOME"
+if [[ "$RUN_ON" == "aws" ]]; then
+  docker-machine ssh $DOCKER_MACHINE "sudo chmod a+w /home/storage"
+  MACHINE_HOME="$MACHINE_HOME/storage"
+  docker_exec bash -c "ln -s /storage/ $MACHINE_HOME"
+
+  msg "Move postgresql to a separate volume"
+  docker_exec bash -c "sudo /etc/init.d/postgresql stop"
+  sleep 10 # wait for postgres stopped
+  docker_exec bash -c "sudo mv /var/lib/postgresql /storage/"
+  docker_exec bash -c "ln -s /storage/postgresql /var/lib/postgresql"
+
+  if [[ ! 
-z ${BACKUP_VOLUME_ID+x} ]]; then + _cp_backup_2_storage; + _dettach_pancake_drive + _cp_backup; + fi + + docker_exec bash -c "sudo /etc/init.d/postgresql start" + sleep 10 # wait for postgres started +fi + +if [[ ! -z ${BACKUP_VOLUME_ID+x} ]]; then + docker_exec bash -c "psql --set ON_ERROR_STOP=on -U postgres -c 'drop database if exists test;'" + docker_exec bash -c "psql --set ON_ERROR_STOP=on -U postgres -c 'alter database postila_ru rename to test;'" fi -END_TIME=$(date +%s); +[ ! -z ${S3_CFG_PATH+x} ] && copy_file $S3_CFG_PATH \ + && docker_exec cp $MACHINE_HOME/.s3cfg /root/.s3cfg +[ ! -z ${DB_DUMP+x} ] && copy_file $DB_DUMP +[ ! -z ${PG_CONFIG+x} ] && copy_file $PG_CONFIG +[ ! -z ${WORKLOAD_CUSTOM_SQL+x} ] && copy_file $WORKLOAD_CUSTOM_SQL +[ ! -z ${WORKLOAD_REAL+x} ] && copy_file $WORKLOAD_REAL + +# copy runs config files +runs_count=${#RUNS[*]} +let runs_count=runs_count/3 +i=0 +while : ; do + j=$i*3 + d=$j+1 + u=$j+2 + delta_config=${RUNS[$j]} + delta_ddl_do=${RUNS[$d]} + delta_ddl_undo=${RUNS[$u]} + [[ ! -z "$delta_config" ]] && copy_file $delta_config + [[ ! -z "$delta_ddl_do" ]] && copy_file $delta_ddl_do + [[ ! -z "$delta_ddl_undo" ]] && copy_file $delta_ddl_undo + let i=$i+1 + [[ "$i" -eq "$runs_count" ]] && break; +done + +sleep 10 # wait for postgres up&running +## Apply machine features +apply_commands_after_container_init; +apply_sql_before_db_restore; +restore_dump; # commented for use pancake drive +apply_sql_after_db_restore; +apply_initial_postgres_configuration; + +msg "Start runs..." +runs_count=${#RUNS[*]} +let runs_count=runs_count/3 +i=0 +while : ; do + j=$i*3 + d=$j+1 + u=$j+2 + delta_config=${RUNS[$j]} + delta_ddl_do=${RUNS[$d]} + delta_ddl_undo=${RUNS[$u]} + + #restore database if not first run + if [[ "$" -gt "0" ]]; then + docker_exec bash -c "sudo /etc/init.d/postgresql stop" + sleep 10 + if [[ ! -z ${BACKUP_VOLUME_ID+x} ]]; then + _rsync + else + restore_dump; + fi + docker_exec bash -c "sudo /etc/init.d/postgresql start" + sleep 10 + fi + + # apply delta + [[ ! -z "$delta_config" ]] && apply_delta_configuration $delta_config + [[ ! -z "$delta_ddl_do" ]] && apply_ddl_do_code $delta_ddl_do + + prepare_start_workload $i; + execute_workload; + collect_results $i; + + num=$i+1 + echo -e " Run #$num done." + echo -e " JSON Report: $ARTIFACTS_DESTINATION/$ARTIFACTS_FILENAME/pgbadger.$num.json" + echo -e " HTML Report: $ARTIFACTS_DESTINATION/$ARTIFACTS_FILENAME/pgbadger.$num.html" + echo -e " Query log: $ARTIFACTS_DESTINATION/$ARTIFACTS_FILENAME/postgresql.workload.$num.log.gz" + echo -e " Prepare log: $ARTIFACTS_DESTINATION/$ARTIFACTS_FILENAME/postgresql.prepare.$num.log.gz" +echo -e " Postgresql configuration log: $ARTIFACTS_DESTINATION/$ARTIFACTS_FILENAME/postgresql.$num.conf" + echo -e " -------------------------------------------" + echo -e " Workload summary:" + echo -e " Summarized query duration:\t" $(docker_exec cat $MACHINE_HOME/$ARTIFACTS_FILENAME/pgbadger.$num.json | jq '.overall_stat.queries_duration') " ms" + echo -e " Queries:\t\t\t" $( docker_exec cat $MACHINE_HOME/$ARTIFACTS_FILENAME/pgbadger.$num.json | jq '.overall_stat.queries_number') + echo -e " Query groups:\t\t" $(docker_exec cat $MACHINE_HOME/$ARTIFACTS_FILENAME/pgbadger.$num.json | jq '.normalyzed_info| length') + echo -e " Errors:\t\t\t" $(docker_exec cat $MACHINE_HOME/$ARTIFACTS_FILENAME/pgbadger.$num.json | jq '.overall_stat.errors_number') + echo -e "-------------------------------------------" + + # revert delta + [[ ! 
-z "$delta_ddl_undo" ]] && apply_ddl_undo_code $delta_ddl_undo + let i=$i+1 + [[ "$i" -eq "$runs_count" ]] && break; +done + DURATION=$(echo $((END_TIME-START_TIME)) | awk '{printf "%d:%02d:%02d", $1/3600, ($1/60)%60, $1%60}') -echo -e "$(date "+%Y-%m-%d %H:%M:%S"): Run done for $DURATION" -echo -e " Report: $ARTIFACTS_DESTINATION/$ARTIFACTS_FILENAME.json" -echo -e " Query log: $ARTIFACTS_DESTINATION/$ARTIFACTS_FILENAME.log.gz" -echo -e " -------------------------------------------" -echo -e " Workload summary:" -echo -e " Summarized query duration:\t" $(docker_exec cat /$MACHINE_HOME/$ARTIFACTS_FILENAME/$ARTIFACTS_FILENAME.json | jq '.overall_stat.queries_duration') " ms" -echo -e " Queries:\t\t\t" $( docker_exec cat /$MACHINE_HOME/$ARTIFACTS_FILENAME/$ARTIFACTS_FILENAME.json | jq '.overall_stat.queries_number') -echo -e " Query groups:\t\t" $(docker_exec cat /$MACHINE_HOME/$ARTIFACTS_FILENAME/$ARTIFACTS_FILENAME.json | jq '.normalyzed_info| length') -echo -e " Errors:\t\t\t" $(docker_exec cat /$MACHINE_HOME/$ARTIFACTS_FILENAME/$ARTIFACTS_FILENAME.json | jq '.overall_stat.errors_number') -echo -e "-------------------------------------------" +echo -e "$(date "+%Y-%m-%d %H:%M:%S"): All runs done for $DURATION" From 58f46f0ab7752065b9f2339904adda596c5d95e6 Mon Sep 17 00:00:00 2001 From: Dmitry Date: Mon, 10 Sep 2018 18:14:59 +0300 Subject: [PATCH 2/6] Series runs improvements --- .circleci/run.yml | 13 +++++++++++++ nancy_run.sh | 9 ++++----- 2 files changed, 17 insertions(+), 5 deletions(-) create mode 100644 .circleci/run.yml diff --git a/.circleci/run.yml b/.circleci/run.yml new file mode 100644 index 0000000..27bcfcb --- /dev/null +++ b/.circleci/run.yml @@ -0,0 +1,13 @@ +run: + 0: + delta_ddl_do: select now(); select now(); + delta_ddl_undo: select now(); +# delta_config: max_wal_size = 2048MB + 1: +# delta_ddl_do: select now(); +# delta_ddl_undo: select now(); + delta_config: max_wal_size = 4092MB +# 2: +# delta_ddl_do: select now(); +# delta_ddl_undo: select now(); +# delta_config: max_wal_size = 4092MB \ No newline at end of file diff --git a/nancy_run.sh b/nancy_run.sh index cfcb4dc..833500e 100755 --- a/nancy_run.sh +++ b/nancy_run.sh @@ -1512,8 +1512,8 @@ function prepare_start_workload() { ) if [[ "$run_number" -eq "1" ]]; then # save preparation log only before first run - docker_exec bash -c "mkdir $MACHINE_HOME/$ARTIFACTS_FILENAME" - docker_exec bash -c "gzip -c $logpath > $MACHINE_HOME/$ARTIFACTS_FILENAME/$ARTIFACTS_FILENAME.$run_number.prepare.log.gz" + docker_exec bash -c "mkdir -p $MACHINE_HOME/$ARTIFACTS_FILENAME" + docker_exec bash -c "gzip -c $logpath > $MACHINE_HOME/$ARTIFACTS_FILENAME/$ARTIFACTS_FILENAME.prepare.log.gz" fi # Clear statistics and log @@ -1611,8 +1611,7 @@ function collect_results() { docker_exec bash -c "psql -U postgres $DB_NAME -b -c '\copy (select * from pg_stat_user_functions) to /$MACHINE_HOME/$ARTIFACTS_FILENAME/pg_stat_user_functions.$run_number.csv with csv;' $VERBOSE_OUTPUT_REDIRECT" docker_exec bash -c "psql -U postgres $DB_NAME -b -c '\copy (select * from pg_stat_xact_user_functions) to /$MACHINE_HOME/$ARTIFACTS_FILENAME/pg_stat_xact_user_functions.$run_number.csv with csv;' $VERBOSE_OUTPUT_REDIRECT" - - docker_exec bash -c "gzip -c $logpath > $MACHINE_HOME/$ARTIFACTS_FILENAME/postgresql.workload.log.gz" + docker_exec bash -c "gzip -c $logpath > $MACHINE_HOME/$ARTIFACTS_FILENAME/postgresql.workload.$run_number.log.gz" docker_exec bash -c "cp /etc/postgresql/$PG_VERSION/main/postgresql.conf 
$MACHINE_HOME/$ARTIFACTS_FILENAME/postgresql.$run_number.conf" msg "Save artifacts..." if [[ $ARTIFACTS_DESTINATION =~ "s3://" ]]; then @@ -1621,7 +1620,7 @@ function collect_results() { if [[ "$RUN_ON" == "localhost" ]]; then docker cp $CONTAINER_HASH:$MACHINE_HOME/$ARTIFACTS_FILENAME $ARTIFACTS_DESTINATION/ elif [[ "$RUN_ON" == "aws" ]]; then - mkdir $ARTIFACTS_DESTINATION/$ARTIFACTS_FILENAME + mkdir -p $ARTIFACTS_DESTINATION/$ARTIFACTS_FILENAME docker-machine scp $DOCKER_MACHINE:/home/storage/$ARTIFACTS_FILENAME/* $ARTIFACTS_DESTINATION/$ARTIFACTS_FILENAME/ else err "ASSERT: must not reach this point" From 17b86cfb4406fa97facb4fabd255288f2ebdcb87 Mon Sep 17 00:00:00 2001 From: Dmitry Date: Mon, 10 Sep 2018 23:31:51 +0300 Subject: [PATCH 3/6] Options renamed --- nancy_run.sh | 57 +++++++++++++++++++++++++++++----------------------- 1 file changed, 32 insertions(+), 25 deletions(-) diff --git a/nancy_run.sh b/nancy_run.sh index 833500e..7241404 100755 --- a/nancy_run.sh +++ b/nancy_run.sh @@ -22,14 +22,14 @@ declare -a RUNS # i - delta_config i+1 delta_ddl_do i+2 delta_ddl_undo function _attach_pancake_drive() { docker-machine ssh $DOCKER_MACHINE "sudo sh -c \"mkdir /home/basedump\"" INSTANCE_ID=$(docker-machine ssh $DOCKER_MACHINE curl -s http://169.254.169.254/latest/meta-data/instance-id) - attachResult=$(aws --region=$AWS_REGION ec2 attach-volume --device /dev/xvdc --volume-id $BACKUP_VOLUME_ID --instance-id $INSTANCE_ID) + attachResult=$(aws --region=$AWS_REGION ec2 attach-volume --device /dev/xvdc --volume-id $DB_EBS_VOLUME_ID --instance-id $INSTANCE_ID) sleep 10 docker-machine ssh $DOCKER_MACHINE sudo mount /dev/xvdc /home/basedump docker-machine ssh $DOCKER_MACHINE "sudo df -h /dev/xvdc" } function _dettach_pancake_drive() { - dettachResult=$(aws --region=$AWS_REGION ec2 detach-volume --volume-id $BACKUP_VOLUME_ID) + dettachResult=$(aws --region=$AWS_REGION ec2 detach-volume --volume-id $DB_EBS_VOLUME_ID) } ####################################### @@ -449,7 +449,7 @@ function check_cli_parameters() { ([[ ! -z ${SQL_BEFORE_DB_RESTORE+x} ]] && [[ -z $SQL_BEFORE_DB_RESTORE ]]) && unset -v SQL_BEFORE_DB_RESTORE ([[ ! -z ${SQL_AFTER_DB_RESTORE+x} ]] && [[ -z $SQL_AFTER_DB_RESTORE ]]) && unset -v SQL_AFTER_DB_RESTORE ([[ ! -z ${AWS_ZONE+x} ]] && [[ -z $AWS_ZONE ]]) && unset -v AWS_ZONE - ([[ ! -z ${RUNS_CONFIG+x} ]] && [[ -z $RUNS_CONFIG ]]) && unset -v RUNS_CONFIG + ([[ ! -z ${CONFIG+x} ]] && [[ -z $CONFIG ]]) && unset -v CONFIG ### CLI parameters checks ### if [[ "$RUN_ON" == "aws" ]]; then @@ -602,15 +602,15 @@ function check_cli_parameters() { fi fi - if [[ ! -z ${RUNS_CONFIG+x} ]]; then + if [[ ! -z ${CONFIG+x} ]]; then #fill runs config - check_path RUNS_CONFIG + check_path CONFIG if [[ "$?" -ne "0" ]]; then err "ERROR: Runs config YML file not found." 
exit 1; fi # load and parse file - eval $(parse_yaml $RUNS_CONFIG "yml_") + eval $(parse_yaml $CONFIG "yml_") # preload runs config data i=0 while : ; do @@ -708,9 +708,9 @@ function check_cli_parameters() { DELTA_CONFIG="$TMP_PATH/target_config_tmp.conf" fi fi - RUNS[0]=DELTA_CONFIG - RUNS[1]=DELTA_SQL_DO - RUNS[2]=DELTA_SQL_UNDO + RUNS[0]=$DELTA_CONFIG + RUNS[1]=$DELTA_SQL_DO + RUNS[2]=$DELTA_SQL_UNDO fi if [[ -z ${ARTIFACTS_DESTINATION+x} ]]; then @@ -1147,10 +1147,10 @@ while [[ $# -gt 0 ]]; do AWS_ZONE="$2"; shift 2 ;; --aws-block-duration ) AWS_BLOCK_DURATION=$2; shift 2 ;; - --runs-config ) - RUNS_CONFIG=$2; shift 2;; - --backup-volume-id ) - BACKUP_VOLUME_ID=$2; shift 2;; + --config ) + CONFIG=$2; shift 2;; + --db-ebs-volume-id ) + DB_EBS_VOLUME_ID=$2; shift 2;; --s3cfg-path ) S3_CFG_PATH="$2"; shift 2 ;; @@ -1229,7 +1229,7 @@ elif [[ "$RUN_ON" == "aws" ]]; then msg " To connect docker machine use:" msg " docker-machine ssh $DOCKER_MACHINE" - if [[ ! -z ${BACKUP_VOLUME_ID+x} ]]; then + if [[ ! -z ${DB_EBS_VOLUME_ID+x} ]]; then _attach_pancake_drive; fi @@ -1576,7 +1576,7 @@ function collect_results() { docker_exec bash -c "/root/pgbadger/pgbadger \ -j $CPU_CNT \ --prefix '%t [%p]: [%l-1] db=%d,user=%u (%a,%h)' /var/log/postgresql/* -f stderr \ - -o $MACHINE_HOME/$ARTIFACTS_FILENAME/$ARTIFACTS_FILENAME.$run_number.json" \ + -o $MACHINE_HOME/$ARTIFACTS_FILENAME/pgbadger.$run_number.json" \ 2> >(grep -v "install the Text::CSV_XS" >&2) msg "Prepare HTML log..." docker_exec bash -c "/root/pgbadger/pgbadger \ @@ -1633,6 +1633,7 @@ function collect_results() { } function _cp_backup_2_storage() { + msg "Copy backup files from pancake to local storage." docker_exec bash -c "mkdir -p /storage/backup/pg_base" docker_exec bash -c "mkdir -p /storage/backup/pg_base_tblspace" @@ -1651,6 +1652,7 @@ function _cp_backup_2_storage() { function _cp_backup() { # Here we think what postgress stopped + msg "Restore(cp) database backup." docker_exec bash -c "rm -rf /var/lib/postgresql/9.6/main/*" OP_START_TIME=$(date +%s); @@ -1686,6 +1688,7 @@ function _cp_backup() { } function _rsync(){ + msg "Restore(rsync) database from backup." docker_exec bash -c "sudo /etc/init.d/postgresql stop" sleep 10 OP_START_TIME=$(date +%s); @@ -1726,7 +1729,7 @@ if [[ "$RUN_ON" == "aws" ]]; then docker_exec bash -c "sudo mv /var/lib/postgresql /storage/" docker_exec bash -c "ln -s /storage/postgresql /var/lib/postgresql" - if [[ ! -z ${BACKUP_VOLUME_ID+x} ]]; then + if [[ ! -z ${DB_EBS_VOLUME_ID+x} ]]; then _cp_backup_2_storage; _dettach_pancake_drive _cp_backup; @@ -1736,7 +1739,7 @@ if [[ "$RUN_ON" == "aws" ]]; then sleep 10 # wait for postgres started fi -if [[ ! -z ${BACKUP_VOLUME_ID+x} ]]; then +if [[ ! -z ${DB_EBS_VOLUME_ID+x} ]]; then docker_exec bash -c "psql --set ON_ERROR_STOP=on -U postgres -c 'drop database if exists test;'" docker_exec bash -c "psql --set ON_ERROR_STOP=on -U postgres -c 'alter database postila_ru rename to test;'" fi @@ -1748,7 +1751,8 @@ fi [ ! -z ${WORKLOAD_CUSTOM_SQL+x} ] && copy_file $WORKLOAD_CUSTOM_SQL [ ! -z ${WORKLOAD_REAL+x} ] && copy_file $WORKLOAD_REAL -# copy runs config files +# copy files +msg "Copy files." 
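+# RUNS is a flat array with three slots per run: index 3k holds run k's
+# delta_config, 3k+1 its delta_ddl_do, 3k+2 its delta_ddl_undo, so
+# runs_count is ${#RUNS[*]}/3 and j/d/u below address one run's files.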
runs_count=${#RUNS[*]} let runs_count=runs_count/3 i=0 @@ -1770,7 +1774,9 @@ sleep 10 # wait for postgres up&running ## Apply machine features apply_commands_after_container_init; apply_sql_before_db_restore; -restore_dump; # commented for use pancake drive +if [[ -z ${DB_EBS_VOLUME_ID+x} ]]; then + restore_dump; # commented for use pancake drive +fi apply_sql_after_db_restore; apply_initial_postgres_configuration; @@ -1782,15 +1788,17 @@ while : ; do j=$i*3 d=$j+1 u=$j+2 + let num=$i+1 + msg "Start run #$num." delta_config=${RUNS[$j]} delta_ddl_do=${RUNS[$d]} delta_ddl_undo=${RUNS[$u]} #restore database if not first run - if [[ "$" -gt "0" ]]; then + if [[ "$i" -gt "0" ]]; then docker_exec bash -c "sudo /etc/init.d/postgresql stop" sleep 10 - if [[ ! -z ${BACKUP_VOLUME_ID+x} ]]; then + if [[ ! -z ${DB_EBS_VOLUME_ID+x} ]]; then _rsync else restore_dump; @@ -1807,13 +1815,12 @@ while : ; do execute_workload; collect_results $i; - num=$i+1 - echo -e " Run #$num done." + msg "Run #$num done." echo -e " JSON Report: $ARTIFACTS_DESTINATION/$ARTIFACTS_FILENAME/pgbadger.$num.json" echo -e " HTML Report: $ARTIFACTS_DESTINATION/$ARTIFACTS_FILENAME/pgbadger.$num.html" echo -e " Query log: $ARTIFACTS_DESTINATION/$ARTIFACTS_FILENAME/postgresql.workload.$num.log.gz" echo -e " Prepare log: $ARTIFACTS_DESTINATION/$ARTIFACTS_FILENAME/postgresql.prepare.$num.log.gz" -echo -e " Postgresql configuration log: $ARTIFACTS_DESTINATION/$ARTIFACTS_FILENAME/postgresql.$num.conf" + echo -e " Postgresql configuration log: $ARTIFACTS_DESTINATION/$ARTIFACTS_FILENAME/postgresql.$num.conf" echo -e " -------------------------------------------" echo -e " Workload summary:" echo -e " Summarized query duration:\t" $(docker_exec cat $MACHINE_HOME/$ARTIFACTS_FILENAME/pgbadger.$num.json | jq '.overall_stat.queries_duration') " ms" @@ -1829,4 +1836,4 @@ echo -e " Postgresql configuration log: $ARTIFACTS_DESTINATION/$ARTIFACTS_FILEN done DURATION=$(echo $((END_TIME-START_TIME)) | awk '{printf "%d:%02d:%02d", $1/3600, ($1/60)%60, $1%60}') -echo -e "$(date "+%Y-%m-%d %H:%M:%S"): All runs done for $DURATION" +msg "All runs done for $DURATION" From 38906c0c94b22d08b2cb43d511cfc03c598b101c Mon Sep 17 00:00:00 2001 From: Dmitry Date: Tue, 11 Sep 2018 14:24:45 +0300 Subject: [PATCH 4/6] Dettach pancake drive implemented --- nancy_run.sh | 12 ++++++++---- 1 file changed, 8 insertions(+), 4 deletions(-) diff --git a/nancy_run.sh b/nancy_run.sh index 7241404..1dec843 100755 --- a/nancy_run.sh +++ b/nancy_run.sh @@ -28,10 +28,6 @@ function _attach_pancake_drive() { docker-machine ssh $DOCKER_MACHINE "sudo df -h /dev/xvdc" } -function _dettach_pancake_drive() { - dettachResult=$(aws --region=$AWS_REGION ec2 detach-volume --volume-id $DB_EBS_VOLUME_ID) -} - ####################################### # Print a help # Globals: @@ -1248,6 +1244,7 @@ elif [[ "$RUN_ON" == "aws" ]]; then CONTAINER_HASH=$( \ docker `docker-machine config $DOCKER_MACHINE` run \ --name="pg_nancy_${CURRENT_TS}" \ + --privileged \ -v /home/ubuntu:/machine_home \ -v /home/storage:/storage \ -v /home/basedump:/basedump \ @@ -1266,6 +1263,13 @@ MACHINE_HOME="/machine_home/nancy_${CONTAINER_HASH}" alias docker_exec='docker $DOCKER_CONFIG exec -i ${CONTAINER_HASH} ' CPU_CNT=$(docker_exec bash -c "cat /proc/cpuinfo | grep processor | wc -l") # for execute in docker + +function _dettach_pancake_drive() { + docker_exec bash -c "umount /basedump" + docker-machine ssh $DOCKER_MACHINE sudo umount /home/basedump + dettachResult=$(aws --region=$AWS_REGION ec2 detach-volume --volume-id 
$DB_EBS_VOLUME_ID) +} + ####################################### # Copy file to container # Globals: From 823e72fca55334c7c1576b9dd43e882dd79be412 Mon Sep 17 00:00:00 2001 From: dmius Date: Mon, 15 Oct 2018 22:11:06 +0300 Subject: [PATCH 5/6] PGbench support and other fixes --- nancy_run.sh | 177 ++++++++++++++++++++++++++++++--------------------- 1 file changed, 103 insertions(+), 74 deletions(-) diff --git a/nancy_run.sh b/nancy_run.sh index 1dec843..4016f31 100755 --- a/nancy_run.sh +++ b/nancy_run.sh @@ -322,6 +322,8 @@ function dbg_cli_parameters() { echo "AWS_EBS_VOLUME_SIZE: $AWS_EBS_VOLUME_SIZE" echo "AWS_REGION: ${AWS_REGION}" echo "AWS_ZONE: ${AWS_ZONE}" + echo "DB_PGBENCH: $DB_PGBENCH" + echo "WORKLOAD_PGBENCH: $WORKLOAD_PGBENCH" fi } @@ -446,6 +448,8 @@ function check_cli_parameters() { ([[ ! -z ${SQL_AFTER_DB_RESTORE+x} ]] && [[ -z $SQL_AFTER_DB_RESTORE ]]) && unset -v SQL_AFTER_DB_RESTORE ([[ ! -z ${AWS_ZONE+x} ]] && [[ -z $AWS_ZONE ]]) && unset -v AWS_ZONE ([[ ! -z ${CONFIG+x} ]] && [[ -z $CONFIG ]]) && unset -v CONFIG + ([[ ! -z ${WORKLOAD_PGBENCH+x} ]] && [[ -z $WORKLOAD_PGBENCH ]]) && unset -v WORKLOAD_PGBENCH + ([[ ! -z ${DB_PGBENCH+x} ]] && [[ -z $DB_PGBENCH ]]) && unset -v DB_PGBENCH ### CLI parameters checks ### if [[ "$RUN_ON" == "aws" ]]; then @@ -549,10 +553,13 @@ function check_cli_parameters() { [[ ! -z ${WORKLOAD_BASIS+x} ]] && let workloads_count=$workloads_count+1 [[ ! -z ${WORKLOAD_REAL+x} ]] && let workloads_count=$workloads_count+1 [[ ! -z ${WORKLOAD_CUSTOM_SQL+x} ]] && let workloads_count=$workloads_count+1 + [[ ! -z ${WORKLOAD_PGBENCH+x} ]] && let workloads_count=$workloads_count+1 - if [[ -z ${DB_PREPARED_SNAPSHOT+x} ]] && [[ -z ${DB_DUMP+x} ]]; then + if [[ -z ${DB_PREPARED_SNAPSHOT+x} ]] && [[ -z ${DB_DUMP+x} ]] \ + && [[ -z ${DB_PGBENCH+x} ]] && [[ -z ${DB_EBS_VOLUME_ID+x} ]] \ + && [[ -z ${DB_LOCAL_PGDATA+x} ]]; then err "ERROR: The object (database) is not defined." - exit 1; + exit 1 fi # --workload-real or --workload-basis-path or --workload-custom-sql @@ -593,8 +600,8 @@ function check_cli_parameters() { check_path PG_CONFIG if [[ "$?" -ne "0" ]]; then # TODO(NikolayS) support file:// and s3:// #err "WARNING: Value given as pg_config: '$PG_CONFIG' not found as file will use as content" - echo "$PG_CONFIG" > $TMP_PATH/pg_config_tmp.sql - PG_CONFIG="$TMP_PATH/pg_config_tmp.sql" + echo "$PG_CONFIG" > $TMP_PATH/pg_config_tmp.conf + PG_CONFIG="$TMP_PATH/pg_config_tmp.conf" fi fi @@ -612,6 +619,7 @@ function check_cli_parameters() { while : ; do var_name_config="yml_run_"$i"_delta_config" delta_config=$(eval echo \$$var_name_config) + delta_config=$(echo $delta_config | tr ";" "\n") var_name_ddl_do="yml_run_"$i"_delta_ddl_do" delta_ddl_do=$(eval echo \$$var_name_ddl_do) var_name_ddl_undo="yml_run_"$i"_delta_ddl_undo" @@ -652,8 +660,8 @@ function check_cli_parameters() { if [[ ! -z "$delta_config" ]]; then check_path delta_config if [[ "$?" -ne "0" ]]; then - echo "$delta_config" > $TMP_PATH/target_config_tmp_$i.sql - RUNS[$j]="$TMP_PATH/target_config_tmp_$i.sql" + echo "$delta_config" > $TMP_PATH/target_config_tmp_$i.conf + RUNS[$j]="$TMP_PATH/target_config_tmp_$i.conf" fi fi if [[ ! 
-z "$delta_ddl_do" ]]; then @@ -894,7 +902,7 @@ function determine_history_ec2_spot_price() { AWS_ZONE=${region:$((${#region}-1)):1} AWS_REGION=${region:0:$((${#region}-1))} msg "Min price from history: $price in $AWS_REGION (zone: $AWS_ZONE)" - multiplier="1.01" + multiplier="1.2" price=$(echo "$price * $multiplier" | bc -l) msg "Increased price: $price" EC2_PRICE=$price @@ -1096,6 +1104,8 @@ while [[ $# -gt 0 ]]; do DB_DUMP="$2"; shift 2 ;; --db-name ) DB_NAME="$2"; shift 2 ;; + --db-pgbench ) + DB_PGBENCH="$2"; shift 2 ;; --commands-after-container-init ) COMMANDS_AFTER_CONTAINER_INIT="$2"; shift 2 ;; --sql-before-db-restore ) @@ -1107,6 +1117,9 @@ while [[ $# -gt 0 ]]; do --workload-custom-sql ) #s3 url|filename|content WORKLOAD_CUSTOM_SQL="$2"; shift 2 ;; + --workload-pgbench ) + #s3 url|filename|content + WORKLOAD_PGBENCH="$2"; shift 2 ;; --workload-real ) #s3 url WORKLOAD_REAL="$2"; shift 2 ;; @@ -1356,25 +1369,29 @@ function apply_sql_before_db_restore() { # None ####################################### function restore_dump() { - OP_START_TIME=$(date +%s); + local op_start_time=$(date +%s) msg "Restore database dump" - case "$DB_DUMP_EXT" in - sql) - docker_exec bash -c "cat $MACHINE_HOME/$DB_DUMP_FILENAME | psql --set ON_ERROR_STOP=on -U postgres $DB_NAME $VERBOSE_OUTPUT_REDIRECT" - ;; - bz2) - docker_exec bash -c "bzcat $MACHINE_HOME/$DB_DUMP_FILENAME | psql --set ON_ERROR_STOP=on -U postgres $DB_NAME $VERBOSE_OUTPUT_REDIRECT" - ;; - gz) - docker_exec bash -c "zcat $MACHINE_HOME/$DB_DUMP_FILENAME | psql --set ON_ERROR_STOP=on -U postgres $DB_NAME $VERBOSE_OUTPUT_REDIRECT" - ;; - pgdmp) - docker_exec bash -c "pg_restore -j $CPU_CNT --no-owner --no-privileges -U postgres -d $DB_NAME $MACHINE_HOME/$DB_DUMP_FILENAME" || true - ;; - esac - END_TIME=$(date +%s); - DURATION=$(echo $((END_TIME-OP_START_TIME)) | awk '{printf "%d:%02d:%02d", $1/3600, ($1/60)%60, $1%60}') - msg "Database dump restored for $DURATION." + if ([ ! -z ${DB_PGBENCH+x} ]); then + docker_exec bash -c "pgbench -i $DB_PGBENCH -U postgres $DB_NAME" || true + else + case "$DB_DUMP_EXT" in + sql) + docker_exec bash -c "cat $MACHINE_HOME/$DB_DUMP_FILENAME | psql --set ON_ERROR_STOP=on -U postgres $DB_NAME $VERBOSE_OUTPUT_REDIRECT" + ;; + bz2) + docker_exec bash -c "bzcat $MACHINE_HOME/$DB_DUMP_FILENAME | psql --set ON_ERROR_STOP=on -U postgres $DB_NAME $VERBOSE_OUTPUT_REDIRECT" + ;; + gz) + docker_exec bash -c "zcat $MACHINE_HOME/$DB_DUMP_FILENAME | psql --set ON_ERROR_STOP=on -U postgres $DB_NAME $VERBOSE_OUTPUT_REDIRECT" + ;; + pgdmp) + docker_exec bash -c "pg_restore -j $CPU_CNT --no-owner --no-privileges -U postgres -d $DB_NAME $MACHINE_HOME/$DB_DUMP_FILENAME" || true + ;; + esac + fi + local end_time=$(date +%s) + local duration=$(echo $((end_time-op_start_time)) | awk '{printf "%d:%02d:%02d", $1/3600, ($1/60)%60, $1%60}') + msg "Time taken to restore database: $duration." } ####################################### @@ -1521,8 +1538,12 @@ function prepare_start_workload() { fi # Clear statistics and log - msg "Execute vacuumdb..." - docker_exec vacuumdb -U postgres $DB_NAME -j $CPU_CNT --analyze + if [[ -z ${WORKLOAD_PGBENCH+x} ]]; then + msg "Execute vacuumdb..." + docker_exec vacuumdb -U postgres $DB_NAME -j $CPU_CNT --analyze + fi + #msg "Execute vacuumdb..." 
+ #docker_exec vacuumdb -U postgres $DB_NAME -j $CPU_CNT --analyze docker_exec bash -c "echo '' > /var/log/postgresql/postgresql-$PG_VERSION-main.log" } @@ -1537,6 +1558,7 @@ function prepare_start_workload() { # None ####################################### function execute_workload() { + local run_number=$1 # Execute workload OP_START_TIME=$(date +%s); msg "Execute workload..." @@ -1548,8 +1570,10 @@ function execute_workload() { if [[ ! -z ${WORKLOAD_REAL_REPLAY_SPEED+x} ]] && [[ "$WORKLOAD_REAL_REPLAY_SPEED" != '' ]]; then docker_exec bash -c "pgreplay -r -s $WORKLOAD_REAL_REPLAY_SPEED $MACHINE_HOME/$WORKLOAD_FILE_NAME" else - docker_exec bash -c "pgreplay -r -j $MACHINE_HOME/$WORKLOAD_FILE_NAME" + docker_exec bash -c "pgreplay -r $MACHINE_HOME/$WORKLOAD_FILE_NAME" fi + elif [ ! -z ${WORKLOAD_PGBENCH+x} ]; then + docker_exec bash -c "pgbench $WORKLOAD_PGBENCH -U postgres $DB_NAME | tee $MACHINE_HOME/$ARTIFACTS_FILENAME/pgbench.$run_number.txt" else if ([ ! -z ${WORKLOAD_CUSTOM_SQL+x} ] && [ "$WORKLOAD_CUSTOM_SQL" != "" ]); then WORKLOAD_CUSTOM_FILENAME=$(basename $WORKLOAD_CUSTOM_SQL) @@ -1639,19 +1663,19 @@ function collect_results() { function _cp_backup_2_storage() { msg "Copy backup files from pancake to local storage." docker_exec bash -c "mkdir -p /storage/backup/pg_base" - docker_exec bash -c "mkdir -p /storage/backup/pg_base_tblspace" +docker_exec bash -c "mkdir -p /storage/backup/pg_base_tblspace" OP_START_TIME=$(date +%s); - docker_exec bash -c "cp -r -p -f /basedump/pg_base/* /storage/backup/pg_base/" + docker_exec bash -c "cp -r -p -f /basedump/pg_base/* /storage/backup/pg_base/" || true END_TIME=$(date +%s); DURATION=$(echo $((END_TIME-OP_START_TIME)) | awk '{printf "%d:%02d:%02d", $1/3600, ($1/60)%60, $1%60}') msg "pg_base copied for $DURATION to storage." - OP_START_TIME=$(date +%s); - docker_exec bash -c "cp -r -p -f /basedump/pg_base_tblspace/* /storage/backup/pg_base_tblspace/" - END_TIME=$(date +%s); - DURATION=$(echo $((END_TIME-OP_START_TIME)) | awk '{printf "%d:%02d:%02d", $1/3600, ($1/60)%60, $1%60}') - msg "pg_base_16418 copied for $DURATION to storage." +OP_START_TIME=$(date +%s); +docker_exec bash -c "cp -r -p -f /basedump/pg_base_tblspace/* /storage/backup/pg_base_tblspace/" || true +END_TIME=$(date +%s); +DURATION=$(echo $((END_TIME-OP_START_TIME)) | awk '{printf "%d:%02d:%02d", $1/3600, ($1/60)%60, $1%60}') +msg "pg_base_16418 copied for $DURATION to storage." } function _cp_backup() { @@ -1660,35 +1684,36 @@ function _cp_backup() { docker_exec bash -c "rm -rf /var/lib/postgresql/9.6/main/*" OP_START_TIME=$(date +%s); - docker_exec bash -c "rm -rf /var/lib/postgresql/$PG_VERSION/main/*" - docker_exec bash -c "cp -r -p -f /storage/backup/pg_base/* /storage/postgresql/$PG_VERSION/main/" + docker_exec bash -c "rm -rf /var/lib/postgresql/$PG_VERSION/main/*" || true + docker_exec bash -c "cp -r -p -f /storage/backup/pg_base/* /storage/postgresql/$PG_VERSION/main/" || true END_TIME=$(date +%s); DURATION=$(echo $((END_TIME-OP_START_TIME)) | awk '{printf "%d:%02d:%02d", $1/3600, ($1/60)%60, $1%60}') msg "pg_base main copied for $DURATION." 
- docker_exec bash -c "mkdir /storage/postgresql_hdd" || true - docker_exec bash -c "ln -s /storage/postgresql_hdd/ /var/lib/postgresql_hdd" || true - docker_exec bash -c "rm -rf /storage/postgresql_hdd/*" - OP_START_TIME=$(date +%s); - docker_exec bash -c "cp -r -p -f /storage/backup/pg_base_tblspace/* /storage/postgresql_hdd/" - END_TIME=$(date +%s); - DURATION=$(echo $((END_TIME-OP_START_TIME)) | awk '{printf "%d:%02d:%02d", $1/3600, ($1/60)%60, $1%60}') - msg "pg_base_16418 copied for $DURATION." +docker_exec bash -c "mkdir /storage/postgresql_hdd" || true +docker_exec bash -c "ln -s /storage/postgresql_hdd/ /var/lib/postgresql_hdd" || true +docker_exec bash -c "rm -rf /storage/postgresql_hdd/*" +OP_START_TIME=$(date +%s); +docker_exec bash -c "cp -r -p -f /storage/backup/pg_base_tblspace/* /storage/postgresql_hdd/" +END_TIME=$(date +%s); +DURATION=$(echo $((END_TIME-OP_START_TIME)) | awk '{printf "%d:%02d:%02d", $1/3600, ($1/60)%60, $1%60}') +msg "pg_base_16418 copied for $DURATION." OP_START_TIME=$(date +%s); - docker_exec bash -c "chown -R postgres:postgres /storage/postgresql/$PG_VERSION/main/*" - docker_exec bash -c "chown -R postgres:postgres /storage/postgresql/$PG_VERSION/main" - docker_exec bash -c "chown -R postgres:postgres /storage/postgresql_hdd/*" - docker_exec bash -c "chown -R postgres:postgres /storage/postgresql_hdd" - docker_exec bash -c "chown -R postgres:postgres /var/lib/postgresql" - docker_exec bash -c "chown -R postgres:postgres /var/lib/postgresql_hdd" - - docker_exec bash -c "chmod 0700 /var/lib/postgresql/9.6/main/" + docker_exec bash -c "chown -R postgres:postgres /storage/postgresql/$PG_VERSION/main/*" || true + docker_exec bash -c "chown -R postgres:postgres /storage/postgresql/$PG_VERSION/main" || true +docker_exec bash -c "chown -R postgres:postgres /storage/postgresql_hdd/*" || true +docker_exec bash -c "chown -R postgres:postgres /storage/postgresql_hdd" || true + docker_exec bash -c "chown -R postgres:postgres /var/lib/postgresql" || true +docker_exec bash -c "chown -R postgres:postgres /var/lib/postgresql_hdd" || true + + docker_exec bash -c "chmod 0700 /var/lib/postgresql/9.6/main/" || true END_TIME=$(date +%s); DURATION=$(echo $((END_TIME-OP_START_TIME)) | awk '{printf "%d:%02d:%02d", $1/3600, ($1/60)%60, $1%60}') msg "Rights changed for $DURATION." docker_exec bash -c "localedef -f UTF-8 -i en_US en_US.UTF-8" + docker_exec bash -c "localedef -f UTF-8 -i ru_RU ru_RU.UTF-8" } function _rsync(){ @@ -1696,29 +1721,31 @@ function _rsync(){ docker_exec bash -c "sudo /etc/init.d/postgresql stop" sleep 10 OP_START_TIME=$(date +%s); - docker_exec bash -c "rsync -av /storage/backup/pg_base/ /storage/postgresql/9.6/main" + docker_exec bash -c "rsync -av /storage/backup/pg_base/ /storage/postgresql/9.6/main" || true END_TIME=$(date +%s); DURATION=$(echo $((END_TIME-OP_START_TIME)) | awk '{printf "%d:%02d:%02d", $1/3600, ($1/60)%60, $1%60}') msg "pg_base main rsync done for $DURATION." - OP_START_TIME=$(date +%s); - docker_exec bash -c "rsync -av /storage/backup/pg_base_tblspace/ /storage/postgresql_hdd/" - END_TIME=$(date +%s); - DURATION=$(echo $((END_TIME-OP_START_TIME)) | awk '{printf "%d:%02d:%02d", $1/3600, ($1/60)%60, $1%60}') - msg "pg_base_tblspace rsync done for $DURATION." 
- - docker_exec bash -c "chown -R postgres:postgres /storage/postgresql/$PG_VERSION/main/*" - docker_exec bash -c "chown -R postgres:postgres /storage/postgresql/$PG_VERSION/main" - docker_exec bash -c "chown -R postgres:postgres /storage/postgresql_hdd/*" - docker_exec bash -c "chown -R postgres:postgres /storage/postgresql_hdd" - docker_exec bash -c "chown -R postgres:postgres /var/lib/postgresql" - docker_exec bash -c "chown -R postgres:postgres /var/lib/postgresql_hdd" - docker_exec bash -c "chmod 0700 /var/lib/postgresql/9.6/main/" +OP_START_TIME=$(date +%s); +docker_exec bash -c "rsync -av /storage/backup/pg_base_tblspace/ /storage/postgresql_hdd/" || true +END_TIME=$(date +%s); +DURATION=$(echo $((END_TIME-OP_START_TIME)) | awk '{printf "%d:%02d:%02d", $1/3600, ($1/60)%60, $1%60}') +msg "pg_base_tblspace rsync done for $DURATION." + + docker_exec bash -c "chown -R postgres:postgres /storage/postgresql/$PG_VERSION/main/*" || true + docker_exec bash -c "chown -R postgres:postgres /storage/postgresql/$PG_VERSION/main" || true +docker_exec bash -c "chown -R postgres:postgres /storage/postgresql_hdd/*" || true +docker_exec bash -c "chown -R postgres:postgres /storage/postgresql_hdd" || true + docker_exec bash -c "chown -R postgres:postgres /var/lib/postgresql" || true +docker_exec bash -c "chown -R postgres:postgres /var/lib/postgresql_hdd" || true + docker_exec bash -c "chmod 0700 /var/lib/postgresql/9.6/main/" || true docker_exec bash -c "sudo /etc/init.d/postgresql start" sleep 10 - docker_exec bash -c "psql --set ON_ERROR_STOP=on -U postgres -c 'drop database if exists test;'" - docker_exec bash -c "psql --set ON_ERROR_STOP=on -U postgres -c 'alter database postila_ru rename to test;'" + if [[ ! -z ${DB_EBS_VOLUME_ID+x} ]] && [[ ! -z ${ORIGINAL_DB_NAME+x} ]] && [[ ! "$ORIGINAL_DB_NAME" == "test" ]]; then + docker_exec bash -c "psql --set ON_ERROR_STOP=on -U postgres -c 'drop database if exists test;'" + docker_exec bash -c "psql --set ON_ERROR_STOP=on -U postgres -c 'alter database $ORIGINAL_DB_NAME rename to test;'" + fi } docker_exec bash -c "mkdir $MACHINE_HOME && chmod a+w $MACHINE_HOME" @@ -1743,9 +1770,11 @@ if [[ "$RUN_ON" == "aws" ]]; then sleep 10 # wait for postgres started fi -if [[ ! -z ${DB_EBS_VOLUME_ID+x} ]]; then +if [[ ! -z ${DB_EBS_VOLUME_ID+x} ]] && [[ ! "$DB_NAME" == "test" ]]; then docker_exec bash -c "psql --set ON_ERROR_STOP=on -U postgres -c 'drop database if exists test;'" - docker_exec bash -c "psql --set ON_ERROR_STOP=on -U postgres -c 'alter database postila_ru rename to test;'" + docker_exec bash -c "psql --set ON_ERROR_STOP=on -U postgres -c 'alter database $DB_NAME rename to test;'" + ORIGINAL_DB_NAME=$DB_NAME + DB_NAME=test fi [ ! -z ${S3_CFG_PATH+x} ] && copy_file $S3_CFG_PATH \ @@ -1778,8 +1807,8 @@ sleep 10 # wait for postgres up&running ## Apply machine features apply_commands_after_container_init; apply_sql_before_db_restore; -if [[ -z ${DB_EBS_VOLUME_ID+x} ]]; then - restore_dump; # commented for use pancake drive +if ([[ ! -z ${DB_DUMP+x} ]] || [[ ! -z ${DB_PGBENCH+x} ]]) && [[ -z ${DB_EBS_VOLUME_ID+x} ]]; then + restore_dump fi apply_sql_after_db_restore; apply_initial_postgres_configuration; @@ -1816,7 +1845,7 @@ while : ; do [[ ! -z "$delta_ddl_do" ]] && apply_ddl_do_code $delta_ddl_do prepare_start_workload $i; - execute_workload; + execute_workload $i; collect_results $i; msg "Run #$num done." 
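
Usage note for the pgbench support added in PATCH 5/6: the value of --db-pgbench is
passed through verbatim to "pgbench -i" when the test database is created, and the
value of --workload-pgbench is passed through verbatim to the benchmarking "pgbench"
call, whose output is captured via tee into pgbench.$run_number.txt among the run
artifacts. A minimal sketch of an invocation (the flag values below are illustrative,
not taken from this series; they are standard pgbench flags):

  # initialize the test database at scale factor 100, then drive it with
  # 10 clients / 4 worker threads for 60 seconds
  ./nancy_run.sh \
    --db-pgbench "-s 100" \
    --workload-pgbench "-c 10 -j 4 -T 60"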
From 8b6cb21365158dddcb0b52bd2758e2a8b3bc01b2 Mon Sep 17 00:00:00 2001
From: dmius
Date: Wed, 17 Oct 2018 20:35:13 +0300
Subject: [PATCH 6/6] Restore database fix

---
 nancy_run.sh | 11 +++++++----
 1 file changed, 7 insertions(+), 4 deletions(-)

diff --git a/nancy_run.sh b/nancy_run.sh
index 4016f31..d323a6e 100755
--- a/nancy_run.sh
+++ b/nancy_run.sh
@@ -902,7 +902,7 @@ function determine_history_ec2_spot_price() {
   AWS_ZONE=${region:$((${#region}-1)):1}
   AWS_REGION=${region:0:$((${#region}-1))}
   msg "Min price from history: $price in $AWS_REGION (zone: $AWS_ZONE)"
-  multiplier="1.2"
+  multiplier="1.1"
   price=$(echo "$price * $multiplier" | bc -l)
   msg "Increased price: $price"
   EC2_PRICE=$price
@@ -1371,8 +1371,11 @@ function apply_sql_before_db_restore() {
 function restore_dump() {
   local op_start_time=$(date +%s)
   msg "Restore database dump"
+  docker_exec bash -c "psql --set ON_ERROR_STOP=on -U postgres -c 'drop database if exists test;'"
+  docker_exec bash -c "psql --set ON_ERROR_STOP=on -U postgres -c 'create database test;'"
   if ([ ! -z ${DB_PGBENCH+x} ]); then
     docker_exec bash -c "pgbench -i $DB_PGBENCH -U postgres $DB_NAME" || true
+    docker_exec bash -c "psql --set ON_ERROR_STOP=on -U postgres -c \"SELECT pg_size_pretty( pg_database_size('test') );\""
   else
     case "$DB_DUMP_EXT" in
       sql)
@@ -1806,12 +1809,12 @@ done
 sleep 10 # wait for postgres up&running
 ## Apply machine features
 apply_commands_after_container_init;
+apply_initial_postgres_configuration;
 apply_sql_before_db_restore;
 if ([[ ! -z ${DB_DUMP+x} ]] || [[ ! -z ${DB_PGBENCH+x} ]]) && [[ -z ${DB_EBS_VOLUME_ID+x} ]]; then
   restore_dump
 fi
 apply_sql_after_db_restore;
-apply_initial_postgres_configuration;
 
 msg "Start runs..."
 runs_count=${#RUNS[*]}
@@ -1829,14 +1832,14 @@ while : ; do
 
   #restore database if not first run
   if [[ "$i" -gt "0" ]]; then
-    docker_exec bash -c "sudo /etc/init.d/postgresql stop"
     sleep 10
     if [[ ! -z ${DB_EBS_VOLUME_ID+x} ]]; then
+      docker_exec bash -c "sudo /etc/init.d/postgresql stop"
       _rsync
+      docker_exec bash -c "sudo /etc/init.d/postgresql start"
     else
       restore_dump;
     fi
-    docker_exec bash -c "sudo /etc/init.d/postgresql start"
     sleep 10
   fi
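
A note on the elapsed-time reporting used throughout the series: every DURATION
message pipes a number of seconds through the same awk formatter, which renders it
as H:MM:SS (hours unpadded, minutes and seconds zero-padded). A standalone sanity
check of that one-liner (the input value is illustrative):

  seconds=3725  # 1 hour, 2 minutes, 5 seconds
  echo $seconds | awk '{printf "%d:%02d:%02d\n", $1/3600, ($1/60)%60, $1%60}'
  # prints: 1:02:05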