
This commit was manufactured by cvs2svn to create tag 'REL_1_2_0_RC3'.

commit 4267a7651f6cef01a74589b724fc86006ca60254, committed by cvs2svn on Sep 7, 2006 (2 parents: af996e9 + 52f6f29)
12 tests/testmultiplemoves/README
@@ -0,0 +1,12 @@
+$Id: README,v 1.2 2006-08-18 18:59:40 cbbrowne Exp $
+
+testmultiplemoves is a test that exercises MOVE SET on a 3-node
+cluster with 10 replication sets.
+
+The interesting bit is that it requests MOVE SET on all 10 sets...
+
+This may be expected to "stress" things such as...
+
+- Auto generation of partial indexes on sl_log_? tables
+- Locking and unlocking sets
+
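The move_sets.ik file further down performs this for the sets this snapshot actually defines. As a rough illustration of what requesting MOVE SET across many sets looks like, here is a hedged shell sketch that emits the corresponding slonik statements for an arbitrary number of sets; NUMSETS, OLDORIGIN, and NEWORIGIN are illustrative names for this example, not variables used by the test harness:

    #!/bin/sh
    # Sketch only: emit LOCK SET / MOVE SET statements for NUMSETS sets.
    # NUMSETS, OLDORIGIN and NEWORIGIN are assumed names for this example.
    NUMSETS=${NUMSETS:-10}
    OLDORIGIN=1
    NEWORIGIN=3
    set=1
    while [ $set -le $NUMSETS ]; do
        echo "lock set (id=$set, origin=$OLDORIGIN);"
        echo "move set (id=$set, old origin=$OLDORIGIN, new origin=$NEWORIGIN);"
        set=`expr $set + 1`
    done

Piping that output into slonik, together with the usual preamble, would request the moves back to back, which is the load pattern the README describes.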
96 tests/testmultiplemoves/generate_dml.sh
@@ -0,0 +1,96 @@
+. support_funcs.sh
+
+init_dml()
+{
+ echo "init_dml()"
+}
+
+begin()
+{
+ echo "begin()"
+}
+
+rollback()
+{
+ echo "rollback()"
+}
+
+commit()
+{
+ echo "commit()"
+}
+
+generate_initdata()
+{
+ numrows=$(random_number 50 1000)
+ i=0;
+ trippoint=`expr $numrows / 20`
+ j=0;
+ percent=0
+ status "generating ${numrows} tranactions of random data"
+ percent=`expr $j \* 5`
+ status "$percent %"
+ GENDATA="$mktmp/generate.data"
+ while : ; do
+ for set in 1 2; do   # only tables 1 and 2 (and their *a children) are created by init_schema.sql
+ txtalen=$(random_number 1 100)
+ txta=$(random_string ${txtalen})
+ txta=`echo ${txta} | sed -e "s/\\\\\\\/\\\\\\\\\\\\\\/g" -e "s/'/''/g"`
+ txtblen=$(random_number 1 100)
+ txtb=$(random_string ${txtblen})
+ txtb=`echo ${txtb} | sed -e "s/\\\\\\\/\\\\\\\\\\\\\\/g" -e "s/'/''/g"`
+ echo "INSERT INTO table${set}(data) VALUES ('${txta}');" >> $GENDATA
+ echo "INSERT INTO table${set}a(table${set}_id,data) SELECT id, '${txtb}' FROM table${set} WHERE data='${txta}';" >> $GENDATA
+ done
+ if [ ${i} -ge ${numrows} ]; then
+ break;
+ else
+ i=$((${i} +1))
+ working=`expr $i % $trippoint`
+ if [ $working -eq 0 ]; then
+ j=`expr $j + 1`
+ percent=`expr $j \* 5`
+ status "$percent %"
+ fi
+ fi
+ done
+ status "done"
+}
+
+do_initdata()
+{
+ originnode=${ORIGINNODE:-"1"}
+ eval db=\$DB${originnode}
+ eval host=\$HOST${originnode}
+ eval user=\$USER${originnode}
+ eval port=\$PORT${originnode}
+ generate_initdata
+ launch_poll
+ status "loading data"
+ $pgbindir/psql -h $host -p $port -d $db -U $user < $mktmp/generate.data > $mktmp/initdata.log 2>&1
+ if [ $? -ne 0 ]; then
+ warn 3 "do_initdata failed, see $mktmp/initdata.log for details"
+ fi
+
+ status "Move sets to node 3"
+ init_preamble
+ cat ${testname}/move_sets.ik >> $mktmp/slonik.script
+ do_ik
+ status "Completed moving sets"
+ stop_poll
+
+ ORIGINNODE=3
+ originnode=3
+ eval db=\$DB${originnode}
+ eval host=\$HOST${originnode}
+ eval user=\$USER${originnode}
+ eval port=\$PORT${originnode}
+
+ status "Generate some more data, switching origin to node 3"
+ generate_initdata
+ status "loading extra data"
+ launch_poll
+
+ $pgbindir/psql -h $host -p $port -d $db -U $user < $mktmp/generate.data > $mktmp/initdata.log 2>&1
+ status "done"
+}
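A pattern worth noting in do_initdata() above is the eval-based indirection used to pick up per-node connection settings (DB1, HOST1, USER1, PORT1, and so on) from the node number. A minimal standalone sketch of that lookup, with invented values for the DB*/HOST* variables:

    #!/bin/sh
    # Sketch of the eval-based indirect variable lookup used in do_initdata().
    # The DB*/HOST* values below are invented for illustration only.
    DB1=regressdb1;  HOST1=localhost
    DB3=regressdb3;  HOST3=localhost

    originnode=3
    eval db=\$DB${originnode}      # eval sees: db=$DB3
    eval host=\$HOST${originnode}  # eval sees: host=$HOST3
    echo "would connect to database '$db' on host '$host'"

Because ${originnode} is expanded before eval runs, the assignment reads whichever DB<n>/HOST<n> pair matches the current origin node.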
4 tests/testmultiplemoves/init_add_tables.ik
@@ -0,0 +1,4 @@
+set add table (id=1, set id=1, origin=1, fully qualified name = 'public.table1', comment='a table');
+set add table (id=2, set id=1, origin=1, fully qualified name = 'public.table1a', comment='a table');
+set add table (id=3, set id=2, origin=1, fully qualified name = 'public.table2', comment='a table');
+set add table (id=4, set id=2, origin=1, fully qualified name = 'public.table2a', comment='a table');
1 tests/testmultiplemoves/init_cluster.ik
@@ -0,0 +1 @@
+init cluster (id=1, comment = 'Regress test node');
3 tests/testmultiplemoves/init_create_set.ik
@@ -0,0 +1,3 @@
+create set (id=1, origin=1, comment='all tables in set 1');
+create set (id=2, origin=1, comment='all tables in set 2');
+
10 tests/testmultiplemoves/init_data.sql
@@ -0,0 +1,10 @@
+INSERT INTO table1(data) VALUES ('placeholder a');
+INSERT INTO table1(data) VALUES ('placeholder b');
+INSERT INTO table1a(table1_id,data) VALUES (1,'placeholder a');
+INSERT INTO table1a(table1_id,data) VALUES (2,'placeholder b');
+
+INSERT INTO table2(data) VALUES ('placeholder a');
+INSERT INTO table2(data) VALUES ('placeholder b');
+INSERT INTO table2a(table2_id,data) VALUES (1,'placeholder a');
+INSERT INTO table2a(table2_id,data) VALUES (2,'placeholder b');
+
8 tests/testmultiplemoves/init_schema.sql
@@ -0,0 +1,8 @@
+-- $Id: init_schema.sql,v 1.2 2006-09-07 13:07:27 xfade Exp $
+
+create table table1 (id serial primary key, data text);
+create table table2 (id serial primary key, data text);
+
+create table table1a (id serial primary key, table1_id int4 references table1(id) on update cascade on delete cascade, data text);
+create table table2a (id serial primary key, table2_id int4 references table2(id) on update cascade on delete cascade, data text);
+
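The table1a and table2a child tables cascade updates and deletes from their parents. A hedged shell check of the ON DELETE CASCADE behaviour; the database name and the bare psql invocation are assumptions, not part of the test harness:

    #!/bin/sh
    # Sketch: confirm that deleting a table1 row cascades to table1a.
    # DB defaults to an invented name; adjust to the actual test database.
    DB=${DB:-regressdb1}
    psql -d "$DB" -c "INSERT INTO table1(data) VALUES ('cascade check');"
    psql -d "$DB" -c "INSERT INTO table1a(table1_id, data) SELECT id, 'child row' FROM table1 WHERE data = 'cascade check';"
    psql -d "$DB" -c "DELETE FROM table1 WHERE data = 'cascade check';"
    # Expect a count of 0 here if the cascade removed the child row.
    psql -d "$DB" -c "SELECT count(*) FROM table1a WHERE data = 'child row';"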
9 tests/testmultiplemoves/init_subscribe_set.ik
@@ -0,0 +1,9 @@
+subscribe set (id=1, provider=1, receiver = 2, forward=yes);
+wait for event (origin=all, confirmed=all, wait on=1);
+subscribe set (id=2, provider=1, receiver = 2, forward=yes);
+wait for event (origin=all, confirmed=all, wait on=1);
+
+subscribe set (id=1, provider=2, receiver = 3, forward=yes);
+wait for event (origin=all, confirmed=all, wait on=2);
+subscribe set (id=2, provider=2, receiver = 3, forward=yes);
+wait for event (origin=all, confirmed=all, wait on=2);
4 tests/testmultiplemoves/move_sets.ik
@@ -0,0 +1,4 @@
+lock set (id=1, origin=1);
+move set (id=1, old origin=1, new origin=3);
+lock set (id=2, origin=1);
+move set (id=2, old origin=1, new origin=3);
4 tests/testmultiplemoves/schema.diff
@@ -0,0 +1,4 @@
+select 'table1', id, data from table1 order by id
+select 'table1a', id, table1_id, data from table1a order by id
+select 'table2', id, data from table2 order by id
+select 'table2a', id, table2_id, data from table2a order by id
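Each line of schema.diff is a standalone query whose output should be identical on every node once replication has caught up after the moves. A hedged sketch of that comparison between two nodes; the host names, database names, and the choice of nodes 1 and 3 are assumptions:

    #!/bin/sh
    # Sketch: run each schema.diff query against two nodes and diff the output.
    # Connection details below are invented for illustration.
    DIFFFILE=schema.diff
    while read query; do
        [ -z "$query" ] && continue
        psql -h localhost -d regressdb1 -A -t -c "$query" > /tmp/node1.out
        psql -h localhost -d regressdb3 -A -t -c "$query" > /tmp/node3.out
        diff -q /tmp/node1.out /tmp/node3.out > /dev/null || echo "MISMATCH: $query"
    done < $DIFFFILE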
4 tests/testmultiplemoves/settings.ik
@@ -0,0 +1,4 @@
+NUMCLUSTERS=${NUMCLUSTERS:-"1"}
+NUMNODES=${NUMNODES:-"3"}
+ORIGINNODE=1
+WORKERS=${WORKERS:-"1"}
39 tools/pull-gborg-mail.sh
@@ -0,0 +1,39 @@
+#!/bin/bash
+# $Id: pull-gborg-mail.sh,v 1.1 2006-09-07 16:27:28 cbbrowne Exp $
+# This script downloads email archives from gBorg
+# Setup and usage:
+# 1. If need be, change ARCHHOME below to a more suitable location for mailing list archives
+# 2. Optional single argument: INIT
+#
+# If you run "bash pull-gborg-mail.sh INIT", it will go through
+# all the years of the Slony-I project at gBorg, and download
+# each month's file.
+#
+# If you pass no argument, it pulls only the current month's email
+# archives, overwriting the existing copies.
+
+ARCHHOME=$HOME/Slony-I/MailingListArchives
+
+ARG=$1
+
+if [[ $ARG == "INIT" ]] ; then
+ for year in 2004 2005 2006; do
+ for month in January February March April May June July August September October November December; do
+ for arch in commit general; do
+ DIR=${ARCHHOME}/${arch}
+ mkdir -p $DIR
+ wget -O $DIR/${year}-${month}.txt http://gborg.postgresql.org/pipermail/slony1-${arch}/${year}-${month}.txt
+ done
+ done
+ done
+else
+ for year in `date +"%Y"`; do
+ for month in `date +"%B"`; do
+ for arch in commit general; do
+ DIR=${ARCHHOME}/${arch}
+ mkdir -p $DIR
+ wget -O $DIR/${year}-${month}.txt http://gborg.postgresql.org/pipermail/slony1-${arch}/${year}-${month}.txt
+ done
+ done
+ done
+fi
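The two documented modes translate into a simple workflow: run the script once with INIT to backfill everything, then rerun it without arguments whenever the current month should be refreshed. A usage sketch; the working directory is an assumption:

    #!/bin/sh
    # One-time backfill of the 2004-2006 archives, then a normal monthly pull.
    bash ./pull-gborg-mail.sh INIT
    bash ./pull-gborg-mail.sh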
