From d44ea22a9f40eeb5c9a447f0e950b6fed236a290 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Peter=20Ko=C5=A1ovan?= Date: Mon, 13 Feb 2012 11:49:59 +0100 Subject: [PATCH 1/3] Update to file samples/correlation.tcl and to UG The sample file was badly outdated, relating to an ancient implementation of the correlator. Still parts of it need to be re-done because I do not get what they were supposed to do. UG section on correlator and analysis was updated and made more complete. Still not finished, though. Also commented out some forgotten debug messages printed to stderr by the correlator. --- doc/ug/analysis.tex | 145 ++++++++++++++++++--------- samples/correlation.tcl | 187 +++++++++++++++++++++++------------ src/statistics_correlation.c | 2 - 3 files changed, 223 insertions(+), 111 deletions(-) diff --git a/doc/ug/analysis.tex b/doc/ug/analysis.tex index a27afd4968a..7429624e137 100644 --- a/doc/ug/analysis.tex +++ b/doc/ug/analysis.tex @@ -1048,33 +1048,55 @@ \subsection{Observables} \todo{Formatted printing is not fully supported yet.} \variant{3} Deletes the observable and makes the $id$ free for a new one. +\todo{Does not work yet} \subsubsection{Implemented observables and their arguments} -Currently the following observables are implemented: +Currently the following observables are implemented. +Particle specifications define a group of particles, from which +the observable should be calculated. They are generic to all +observables and are described after the list of observables. -\todo{Missing description of parameters} +\todo{Missing descriptions of parameters of several observables} \begin{itemize} - \item \lit{particle_positions} \\ - Types or ids have to be specified as described above. - \item \lit{particle_velocities}\\ - Types or ids have to be specified as described above. 
- \item \lit{com_velocity} - \item \lit{com_position} - \item \lit{particle_positions} - \item \lit{particle_positions_conditional} \todo{this has been lost in a git stash and has to be re-implemented.} - \item \lit{stress_tensor} - \item \lit{stress_tensor_acf_obs} - \item \lit{particle_currents} - \item \lit{currents} - \item \lit{dipole_moment} - \item \lit{structure_factor} - \item \lit{interacts_with types \var{type\_list\_1} types \var{type\_list\_2} \var{cutoff} } \\ - For each particle belonging to \var{type\_list\_1} the observable is unity if - a neighbour of a type from \var{type\_list\_2} is found within the distance - defined by the \var{cutoff}. If no such neighbour is found, the observable is - zero. - The observable has as one dimension per each particle which has a type - given in the type list. + \item \lit{particle_positions} \var{particle\_specifications}\\ + Positions of the particles, in the format + $x_1,\ y_1,\ z_1,\ x_2,\ y_2,\ z_2,\ \dots\ x_n,\ y_n,\ z_n$. The particles + are ordered ascending according to their ids. + \item \lit{particle_velocities} \var{particle\_specifications}\\ + Velocities of the particles, in the format\\ + $v^x_1,\ v^y_1,\ v^z_1,\ v^x_2,\ v^y_2,\ v^z_2,\ + \dots\ v^x_n,\ v^y_n,\ v^z_n$. + The particles are ordered ascending according to their ids. + \item \lit{com_velocity} \var{particle\_specifications}\\ + Velocity of the centre of mass + \item \lit{com_position} \var{particle\_specifications}\\ + Position of the centre of mass + \item \lit{stress_tensor} \\ + The stress tensor. It only works with all particles. + It is returned as a 9-dimensional array:\\ + $ \{\ \sigma_{xx},\ \sigma_{xy},\ \sigma_{xz},\ \sigma_{yx},\ \sigma_{yy},\ + \sigma_{yz},\ \sigma_{zx},\ \sigma_{zy},\ \sigma_{zz}\ \} $ + \item \lit{stress_tensor_acf_obs} \\ + The observable for computation of the Stress tensor autocorrelation function. + Same as stress tensor, it only works with all particles. 
+ It is returned as a 6-dimensional array:\\ + $ \{\ \sigma_{xy},\ \sigma_{yz},\ \sigma_{zx},\ + ( \sigma_{xx} - \sigma_{yy}),\ + ( \sigma_{xx} - \sigma_{zz}),\ + ( \sigma_{yy} - \sigma_{zz})\ + \} $ \\ + where $\sigma_{ij}$ are the components of the stress tensor. + \item \lit{particle_currents} \var{particle\_specifications}\\ + \item \lit{currents} \var{particle\_specifications}\\ + \item \lit{dipole_moment} \var{particle\_specifications}\\ + \item \lit{structure_factor} \var{particle\_specifications}\\ + \item \lit{interacts_with} \var{particle\_specifications\_1} \var{particle\_specifications\_2} \var{cutoff} \\ + For each particle belonging to \var{particle\_specifications\_1} + the observable is unity if a neighbour of a type from + \var{particle\_specifications\_2} is found within the distance + defined by the \var{cutoff}. If no such neighbour is found, the + observable is zero. The observable has as one dimension per each + particle of \var{particle\_specifications\_1} \item \lit{nearest_neighbour_conditional} \\ For each particle belonging to \var{type\_list\_1} return the particle id of the nearest neighbour which has a particle type from \var{type\_list\_2}. @@ -1091,31 +1113,37 @@ \subsubsection{Implemented observables and their arguments} \item \lit{radial_flux_density_profile} \item \lit{flux_density_profile} \item \lit{lb_radial_velocity_profile} - \todo{the following observables do not work propely yet} \item \lit{textfile \var{textfilename} \opt{\var{column\_1} \dots \var{column\_n} }} This option allows to read data from an arbitrary text file, organized in columns. The name of the textfile is + \todo{Texfile input observable not fully supported yet!} \item \lit{tclinput \var{dimQ} } TCL input of length \var{dimQ} is used as ``observable''. 
+ \todo{Tcl input observable not fully supported yet!} \end{itemize} -The generic arguments to any observable for which they make sense are +\minisec{Particle specifications} +You can specify from which particles the observable should be computed in one of +the following ways. In all cases, particle specifications refer to the current +state of Espresso. Any later changes to particles (additions, deletions, changes +of types) will not be automatically reflected in the observable. \begin{itemize} \item \lit{all} \\ Requests observable calculation based on all particles in the system. \item \lit{types} \var{ type\_list } \\ Restricts observable calculation to a given particle type(s). The type list is a tcl list of existing particle types. - \todo{All should not be a keyword but a default value?} \item \lit{id} \var{ id\_list } \\ Restricts observable calculation to a given list of particle id(s). The id list is a tcl list of existing particle ids. - \todo{the following two do not really work yet} - \item \lit{blocks} \var{ m } \\ - From an $n$-dimensional observable craeates an $n/m$-dimensional one by - averaging over $m$ neighbouring entries in the data array. - \item \lit{strides} \var{ m } \\ - From an $n$-dimensional observable craeates an $n/m$-dimensional one by - averaging over entries which are separated by $m$ in the data array. +% The following two particle specifications have not been implemented yet +% \item \lit{blocks} \var{ m } \\ +% From an $n$-dimensional observable creates an $n/m$-dimensional one by +% averaging over $m$ neighbouring entries in the data array. +% \todo{Not fully supported yet!} +% \item \lit{strides} \var{ m } \\ +% From an $n$-dimensional observable creates an $n/m$-dimensional one by +% averaging over entries which are separated by $m$ in the data array. +% \todo{Not fully supported yet!} \end{itemize} @@ -1145,28 +1173,24 @@ \subsection{Correlations} Variants \variant{1} to \variant{5} operate only on existing correlations. 
Variant\variant{6} creates an new correlation. -Variant \variant{0} currently prints short help on using the -correlations. In future it should return the parameters of all -correlations which are currently defined. -\todo{fix} +Variant \variant{0} +Returns a tcl list of the defined correlations including their parameters. +\todo{Maybe not all parameters are printed.} Variant \variant{1} returns the number of currently defined correlations. -Variant \variant{2} updates the correlation estimates. Using this is -deprecated, unless you are updating your correlations using data from -the script level. - -Variant \variant{2} starts or stops automatically updating the -correlation estimates. The update frequency is automatically adjusted +Variant \variant{2} +with \lit{autoupdate \{start | stop\}} it starts or stops automatically updating +the correlation estimates. The update frequency is automatically adjusted based based on the value of \var{dt} provided when defining the correlation. With \lit{update} it updates the correlation estimates based on the instantaneous state of the system. -A correlation cen be either in autoupdate or manual update regime but not in both. +A correlation can be either in autoupdate or manual update regime but not in both. In the manual update mode it is the user's responsibility to provide samples in the proper time interval. The correlator has no way to check for it. It is technically possible to stop autoupdating and start updating manually but -make if you want to do it, make sure that you exactly know what you are doing. +if you opt for it, make sure that you exactly know what you are doing. Variant \variant{3} correlates all data from history which are left in the buffers. Once this has been done, the history is lost and no @@ -1180,12 +1204,25 @@ \subsection{Correlations} estimate as a Tcl variable. Variant \variant{5} writes the current status of the correlation -estimate to the specified filename. 
+estimate to the specified filename. If the file exists, its contents will +be overwritten. Variant \variant{6} defines a new correlation and returns an integer $id$ which has been assigned to it. Its further arguments are described below. +\minisec{Output format} + +The output looks as follows: +\begin{code} +tau1 n_samples C1 C2 ... Cn +tau2 n_samples C1 C2 ... Cn +\end{code} +Where each line corresponds to a given value of \lit{tau}, \lit{n_samples} is the number +of samples which contributed to the correlation at this level and $C_i$ are the individual +components of the correlation. + + \begin{arguments} \item \lit{obs1} and \lit{obs2} \\ are ids of the observables A and B that are to correlated. The ids have to refer to existing @@ -1197,7 +1234,17 @@ \subsection{Correlations} The operation that is performed on $A(t)$ and $B(t+\tau)$ to obtain $C(\tau)$. The following operations are currently is available: \begin{itemize} - \item List them here! \todo{write the list} + \item \lit{scalar_product} \\ + Scalar product of $A$ and $B$, \ie $C=\sum\limits_{i} A_i B_i$ + \item \lit{componentwise_product} \\ + Componentwise product of $A$ and $B$, \ie $C_i = A_i B_i$ + \item \lit{square_distance_componentwise} \\ + Each component of the correlation vector is the square of the difference between the + corresponding components of the observables, \ie $C_i = (A_i-B_i)^2$. + Example: when $A$ is \lit{particle_positions}, it produces the mean square displacement + (for each component separately). + \item \lit{complex_conjugate_product} + %\item List them here! \todo{write the list} \end{itemize} \item \lit{dt} \\ The time interval of sampling data points. When autoupdate is used, \var{dt} has @@ -1249,7 +1296,9 @@ \subsubsection{Multiple tau correlator} help spreading the message. 
\begin{figure}[ht] -%\includegraphics[width=7cm]{figures/data_set_N} +\begin{center} +\includegraphics[width=0.9\textwidth]{figures/correlator_scheme} +\end{center} \caption{Schematic representation of buffers in the correlator.} \label{fig:dataSet} \end{figure} diff --git a/samples/correlation.tcl b/samples/correlation.tcl index 83b3330b3f8..1a370741c6d 100644 --- a/samples/correlation.tcl +++ b/samples/correlation.tcl @@ -21,90 +21,155 @@ ## This is a sample script that shows how the correlation ## module is supposed to work. It should be extended soon, but -## should already give an idea on how the correlations are supposed to -## work. +## should already give an overview of the correlation engine. -## First set up 100 particle in a simulation box +## First set up particles in a simulation box ## with (not so important MD parameters) set box_l 10. setmd box_l $box_l $box_l $box_l set friction 1.0 -set force 1. -thermostat langevin 1. $friction -setmd time_step 0.01 +set time_step 0.01 +set temperature 1.0 +set run_time 10000; +set int_steps 100; # number of steps per integration round + +thermostat langevin $temperature $friction +setmd time_step $time_step setmd skin 0.1 t_random seed [ pid ] -part 0 pos 0. 0. 0. +# set up some non-interacting particles of type 0 +part 0 pos 0. 0. 0. type 0 +part 1 pos 1. 1. 1. type 0 + +# Define some observables +################################ + +#velocities of particles of type 0 +set vel [observable new particle_velocities type [list 0]] + +# positions of all particles +set pos [observable new particle_positions all] + +# center of mass of particles with ids 0 and 2 +set com_pos [observable new com_position id [list 0 1]] -## Now we set up different correlation all calculating the VACF of particle one -## They all do the same, but show different ways of how the correlation method can be applied. -## Note that the particles for the correlation must be already created! 
-analyze correlation 0 first_obs particle_velocities id { 0 } second_obs particle_velocities id { 0 } corr_operation scalar_product tau_lin 20 tau_max 100 delta_t [ setmd time_step ] compress1 discard1 -analyze correlation 1 first_obs particle_velocities id { 0 } corr_operation scalar_product tau_lin 20 tau_max 100. delta_t [ setmd time_step ] compress1 discard1 -analyze correlation 1 autoupdate start -analyze correlation 2 first_obs tclinput 3 corr_operation scalar_product tau_lin 20 tau_max 100. delta_t [ setmd time_step ] compress1 discard1 +# Particle specifications always refer to currently existing particles +# and are internally translated to a list of particle ids +# if we add more particles later, they will not be accounted for -## We set up a second particle of which we measure the mean square displacement -part 1 pos 1. 1. 1. -analyze correlation 3 first_obs particle_velocities id { 1 } corr_operation square_distance tau_lin 20 tau_max 100 delta_t [ setmd time_step ] compress1 discard1 -analyze correlation 3 autoupdate start +# velocity autocorrelation function of particles of type 0 +set vacf1 [correlation new obs1 $vel corr_operation scalar_product tau_max 1 dt $time_step] +# this is the minimum number of arguments to the correlation +# by default it uses the trivial correlation algorithm, which is usable only for relatively +# short tau_max < 100*dt + +# same vacf as above, but using the multiple tau correlator and much longer tau_max +set vacf2 [correlation new obs1 $vel corr_operation scalar_product tau_lin 16 tau_max 100 dt $time_step compress1 linear] +# for longer time scales, use the multiple tau correlator by specifying tau_lin +# linear compression avoids loss of statistical quality of the data on long +# time scales, but is only usable with some combinations of observables +# and correlation operations + +# mean square displacement of all particles +set msd [correlation new obs1 $pos corr_operation square_distance_componentwise tau_lin 16 
tau_max $run_time dt $time_step compress1 discard1] +# we want to compute the msd for all time scales between time step and total simulation time +# therefore we use the multiple tau correlator with tau_lin=16 +# discard1 is the default compression function of the multiple tau correlator +# to compute msd, discard1 is the only safe choice + +# same msd as above, but we will update it manually, with much lower frequency +set msd_man [correlation new obs1 $pos corr_operation square_distance_componentwise tau_lin 16 tau_max $run_time dt [expr $time_step*$int_steps] compress1 discard1] + +# FIXME tcl_input used to be implemented, but somehow got lost? +# msd of particle 1 based on TCL input, but with much lower sampling frequency +#set dim_input 3; # dimensionality of the input needs to be specified +#set msd_tcl [correlation new obs1 tclinput $dim_input corr_operation scalar_product tau_lin 20 tau_max 100 delta_t [expr $time_step*$int_steps] compress1 discard1] +# this will also be updated manually + +# Tell Espresso to update the desired correlations automatically +correlation $vacf1 autoupdate start +correlation $vacf2 autoupdate start +correlation $msd autoupdate start ## Now we want to measure the mobility of particle 1. We use an external force ## and investigate the mean of its velocity in x direction. -set part_with_force 100 -set counter 2 -for { set i 0 } { $i < $part_with_force } { incr i } { - part $counter pos 1 1 1 ext_force $force 0. 0. type 1 - incr counter -} -analyze correlation 4 first_obs particle_velocities type { 1 } corr_operation componentwise_product tau_lin 20 tau_max 100. delta_t [ setmd time_step ] compress1 linear -analyze correlation 4 autoupdate start +# create one more particle of type 1 with an external force applied +set force 1. +set new_id [setmd n_part]; +part $new_id pos 1 1 1 ext_force $force 0. 0. 
type 1 + +#velocities of particles of type 1 +set vel_force [observable new particle_velocities type [list 1]] + +# now we take the componentwise product instead of scalar product to get +# vacf in x, y and z separately +set vacf_force [correlation new obs1 $vel_force corr_operation componentwise_product tau_lin 16 tau_max 100. dt $time_step compress1 linear] +# and make it autoupdate +correlation $vacf_force autoupdate start + + +# FIXME the rest has to be updated by Stefan, because I do not completely understand what kind of results it should produce -analyze correlation [ analyze correlation n_corr ] first_obs density_profile type { 0 } startz 0 stopz 10 nbins 10 second_obs density_profile id { 0 } startz 0 stopz 10 nbins 10 corr_operation componentwise_product tau_lin 10 tau_max 1. delta_t [ setmd time_step ] compress1 discard1 compress2 discard2 -#analyze correlation 1 first_obs radial_density_profile type { 0 } center 5. 5. 5. stopr 5 nbins 10 second_obs radial_density_profile type { 0 } center 5. 5. 5. stopr 5 nbins 10 corr_operation componentwise_product tau_lin 5 hierarchy_depth 1 delta_t [ setmd time_step ] compress1 discard1 compress2 discard2 +#correlation [ correlation n_corr ] first_obs density_profile type { 0 } startz 0 stopz 10 nbins 10 second_obs density_profile id { 0 } startz 0 stopz 10 nbins 10 corr_operation componentwise_product tau_lin 10 tau_max 1. delta_t [ setmd time_step ] compress1 discard1 compress2 discard2 +#correlation 1 first_obs radial_density_profile type { 0 } center 5. 5. 5. stopr 5 nbins 10 second_obs radial_density_profile type { 0 } center 5. 5. 5. stopr 5 nbins 10 corr_operation componentwise_product tau_lin 5 hierarchy_depth 1 delta_t [ setmd time_step ] compress1 discard1 compress2 discard2 -inter 0 0 lennard-jones 0 0.25 0. 0. +#inter 0 0 lennard-jones 0 0.25 0. 0. ## We also calculate the variance of the x component of the velocity ## as reference value (to see that everything works). set var 0. set av 0. 
+ + ## Now comes the main integration loop -set nsteps 100 -set ofile [ open "v.dat" "w"] -for { set i 0 } { $i < $nsteps } { incr i } { - integrate 1 - ## The correlation is updated after every MD step - analyze correlation 0 update - analyze correlation 2 update [ part 0 print v ] +set round 0; +set time [setmd time]; +#set ofile [ open "v.dat" "w"] +while { $time < $run_time } { + if { [expr $round%1000] == 0 } { + puts "Integration round $round, time $time"; + } + if { [integrate $int_steps] != "" } { + puts "integration failed"; + exit; + } else { incr round; } + # Explicit call to update a correlation + correlation $msd_man update; + # Updating the correlation from TCL input + #correlation $msd_tcl update [ part 0 print v ]; set av [ expr $av + [ lindex [ part 0 print v ] 0 ] ] set var [expr $var + [ lindex [ part 0 print v ] 0 ] * [ lindex [ part 0 print v ] 0 ] ] - puts $ofile [ part 0 print v ] + #puts $ofile [ part 0 print v ] + set time [setmd time]; } -close $ofile -set file_corr_number [ analyze correlation n_corr ] -analyze correlation $file_corr_number first_obs textfile "v.dat" corr_operation scalar_product tau_lin 20 tau_max 100. delta_t .01 compress1 discard1 -analyze correlation $file_corr_number update_from_file -analyze correlation $file_corr_number write_to_file "test.dat" - -analyze correlation 0 write_to_file "corr0.dat" -analyze correlation 1 write_to_file "corr1.dat" -analyze correlation 2 write_to_file "corr2.dat" -analyze correlation $file_corr_number write_to_file "corr3.dat" -analyze correlation 3 write_to_file "msd.dat" +#close $ofile +#set file_corr_number [ correlation n_corr ] +#correlation $file_corr_number first_obs textfile "v.dat" corr_operation scalar_product tau_lin 20 tau_max 100. 
delta_t .01 compress1 discard1 +#correlation $file_corr_number update_from_file +#correlation $file_corr_number write_to_file "test.dat" + +# to make use of all the history, finalize all correlations when the integration is done +for {set i 0} {$i < [correlation n_corr] } {incr i} { + correlation $i finalize; +} -#analyze correlation 3 finalize -analyze correlation 4 write_to_file "corr_with_force.dat" +correlation $vacf1 write_to_file "vacf1.dat" +correlation $vacf2 write_to_file "vacf2.dat" +correlation $msd write_to_file "msd.dat" +correlation $msd_man write_to_file "msd_man.dat" +#correlation $msd_tcl write_to_file "msd_tcl.dat" +correlation $vacf_force write_to_file "vacf_force.dat" +exit; #Lets look at the average velocities of the particles #with external force -set average [ analyze correlation 4 print average1 ] -set variance [ analyze correlation 4 print variance1 ] -set corrtime [ analyze correlation 4 print correlation_time ] -set stdev_mean [ analyze correlation 4 print average_errorbars] +set average [ correlation 4 print average1 ] +set variance [ correlation 4 print variance1 ] +set corrtime [ correlation 4 print correlation_time ] +set stdev_mean [ correlation 4 print average_errorbars] set true_value [ list ] set true_correlation_time [ list ] for { set i 0 } { $i < $part_with_force } { incr i } { @@ -136,13 +201,13 @@ exit ## Finally we print the result to the screen. 
-#puts "average [ expr $av/$nsteps ] [ lindex [ analyze correlation 0 print average1 ] 0 ]" -#puts "variance [ expr $var/$nsteps ] [ lindex [ analyze correlation 0 print variance1 ] 0 ]" -#set ct [ analyze correlation 0 print correlation_time ] -#analyze correlation 0 print -#set dens [ analyze correlation 0 print average1 ] -puts [ analyze correlation 0 print average1 ] -#puts [ analyze correlation 1 print average1 ] +#puts "average [ expr $av/$nsteps ] [ lindex [ correlation 0 print average1 ] 0 ]" +#puts "variance [ expr $var/$nsteps ] [ lindex [ correlation 0 print variance1 ] 0 ]" +#set ct [ correlation 0 print correlation_time ] +#correlation 0 print +#set dens [ correlation 0 print average1 ] +puts [ correlation 0 print average1 ] +#puts [ correlation 1 print average1 ] set mean 0 set var 0 diff --git a/src/statistics_correlation.c b/src/statistics_correlation.c index d4c55e97f10..620b247ccfb 100644 --- a/src/statistics_correlation.c +++ b/src/statistics_correlation.c @@ -663,8 +663,6 @@ void autoupdate_correlations() { // printf("checking correlation %d autoupdate is %d \n", i, correlations[i].autoupdate); if (correlations[i].autoupdate && sim_time-correlations[i].last_update>correlations[i].dt*0.99999) { //printf("updating %d\n", i); - if (i==1) - fprintf(stderr,"updating %d\n", i); correlations[i].last_update=sim_time; double_correlation_get_data(&correlations[i]); } From 993474df4b68f916dfd7a7c58074500d78a4e5bb Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Peter=20Ko=C5=A1ovan?= Date: Mon, 13 Feb 2012 13:49:53 +0100 Subject: [PATCH 2/3] Forgot to add new figure to the repo --- doc/ug/figures/correlator_scheme.fig | 211 +++++++++++++++++++++++++++ doc/ug/figures/correlator_scheme.pdf | Bin 0 -> 16289 bytes 2 files changed, 211 insertions(+) create mode 100644 doc/ug/figures/correlator_scheme.fig create mode 100644 doc/ug/figures/correlator_scheme.pdf diff --git a/doc/ug/figures/correlator_scheme.fig b/doc/ug/figures/correlator_scheme.fig new file mode 100644 
index 00000000000..03249cfa9a8 --- /dev/null +++ b/doc/ug/figures/correlator_scheme.fig @@ -0,0 +1,211 @@ +#FIG 3.2 Produced by xfig version 3.2.5b +Landscape +Center +Metric +A4 +100.00 +Single +-2 +1200 2 +6 -1170 495 270 990 +4 1 0 50 -1 16 14 0.0000 4 225 1425 -450 675 Compression\001 +4 1 0 50 -1 16 14 0.0000 4 180 510 -450 990 level\001 +-6 +6 1485 720 1950 855 +4 0 0 50 -1 16 12 0.0000 4 135 285 1665 855 = 2\001 +4 0 0 50 -1 32 12 0.0000 4 90 90 1485 855 t\001 +-6 +6 810 990 1275 1125 +4 0 0 50 -1 16 12 0.0000 4 135 285 990 1125 = 1\001 +4 0 0 50 -1 32 12 0.0000 4 90 90 810 1125 t\001 +-6 +6 5985 720 6615 900 +4 0 0 50 -1 16 12 0.0000 4 180 450 6165 855 = p-1\001 +4 0 0 50 -1 32 12 0.0000 4 90 90 5985 855 t\001 +-6 +6 3105 2460 3570 2610 +4 0 0 50 -1 32 12 0.0000 4 90 90 3105 2565 t\001 +4 0 0 50 -1 16 12 0.0000 4 150 285 3285 2565 = p\001 +-6 +6 6120 2145 6975 2340 +4 0 0 50 -1 16 12 0.0000 4 195 675 6300 2295 = 2(p-1)\001 +4 0 0 50 -1 32 12 0.0000 4 90 90 6120 2295 t\001 +-6 +6 6165 3585 7020 3780 +4 0 0 50 -1 16 12 0.0000 4 195 675 6345 3735 = 4(p-1)\001 +4 0 0 50 -1 32 12 0.0000 4 90 90 6165 3735 t\001 +-6 +6 3015 3780 3585 3960 +4 0 0 50 -1 16 12 0.0000 4 180 390 3195 3915 = 2p\001 +4 0 0 50 -1 32 12 0.0000 4 90 90 3015 3915 t\001 +-6 +6 4320 3720 5235 3915 +4 0 0 50 -1 32 12 0.0000 4 90 90 4320 3870 t\001 +4 0 0 50 -1 16 12 0.0000 4 195 735 4500 3870 = 2(p+2)\001 +-6 +6 4860 720 5490 900 +4 0 0 50 -1 16 12 0.0000 4 180 450 5040 855 = p-2\001 +4 0 0 50 -1 32 12 0.0000 4 90 90 4860 855 t\001 +-6 +6 585 1800 1080 2700 +2 1 0 1 0 7 20 -1 -1 0.000 0 0 -1 0 0 3 + 1080 1800 1080 1935 630 1935 +2 1 0 1 0 7 20 -1 -1 0.000 0 0 -1 1 0 2 + 0 0 1.00 60.00 120.00 + 630 1800 630 2700 +-6 +6 1080 1800 2025 2700 +2 1 0 1 0 7 20 -1 -1 0.000 0 0 -1 1 0 4 + 0 0 1.00 60.00 120.00 + 1575 1800 1575 1935 1125 2070 1125 2700 +2 1 0 1 0 7 20 -1 -1 0.000 0 0 -1 0 0 3 + 2025 1800 2025 1935 1575 1935 +-6 +6 1530 1800 2925 2700 +2 1 0 1 0 7 20 -1 -1 0.000 0 0 -1 1 0 4 + 0 0 1.00 
60.00 120.00 + 2475 1800 2475 1935 1575 2070 1575 2700 +2 1 0 1 0 7 20 -1 -1 0.000 0 0 -1 0 0 3 + 2925 1800 2925 1935 2475 1935 +-6 +6 1035 3150 1980 4050 +2 1 0 1 0 7 20 -1 -1 0.000 0 0 -1 1 0 4 + 0 0 1.00 60.00 120.00 + 1530 3150 1530 3285 1080 3420 1080 4050 +2 1 0 1 0 7 20 -1 -1 0.000 0 0 -1 0 0 3 + 1980 3150 1980 3285 1530 3285 +-6 +6 585 3150 1080 4050 +2 1 0 1 0 7 20 -1 -1 0.000 0 0 -1 0 0 3 + 1080 3150 1080 3285 630 3285 +2 1 0 1 0 7 20 -1 -1 0.000 0 0 -1 1 0 2 + 0 0 1.00 60.00 120.00 + 630 3150 630 4050 +-6 +6 4590 2385 5280 2565 +4 0 0 50 -1 16 12 0.0000 4 180 510 4770 2520 = p+2\001 +4 0 0 50 -1 32 12 0.0000 4 90 90 4590 2520 t\001 +-6 +2 1 0 1 0 7 50 -1 -1 0.000 0 0 -1 0 0 2 + 900 1350 900 1800 +2 1 0 1 0 7 50 -1 -1 0.000 0 0 -1 0 0 2 + 1350 1350 1350 1800 +2 1 0 1 0 7 50 -1 -1 0.000 0 0 -1 0 0 2 + 2250 1350 2250 1800 +2 1 0 1 0 7 50 -1 -1 0.000 0 0 -1 0 0 2 + 2700 1350 2700 1800 +2 1 0 1 0 7 50 -1 -1 0.000 0 0 -1 0 0 2 + 3150 1350 3150 1800 +2 1 0 1 0 7 50 -1 -1 0.000 0 0 -1 0 0 2 + 1800 1350 1800 1800 +2 2 0 1 0 7 50 -1 -1 0.000 0 0 -1 0 0 5 + 450 1350 7200 1350 7200 1800 450 1800 450 1350 +2 1 0 1 0 7 50 -1 -1 0.000 0 0 -1 0 0 2 + 6300 1350 6300 1800 +2 1 0 1 0 7 50 -1 -1 0.000 0 0 -1 0 0 2 + 5400 1350 5400 1800 +2 1 0 1 0 7 50 -1 -1 0.000 0 0 -1 0 0 2 + 900 4050 900 4500 +2 1 0 1 0 7 50 -1 -1 0.000 0 0 -1 0 0 2 + 1350 4050 1350 4500 +2 2 0 1 0 7 50 -1 -1 0.000 0 0 -1 0 0 5 + 450 4050 7200 4050 7200 4500 450 4500 450 4050 +2 1 0 1 0 7 50 -1 -1 0.000 0 0 -1 0 0 2 + 5850 4050 5850 4500 +2 1 0 1 0 7 50 -1 -1 0.000 0 0 -1 0 0 2 + 4950 4050 4950 4500 +2 1 0 1 0 7 50 -1 -1 0.000 0 0 -1 0 0 2 + 900 2700 900 3150 +2 1 0 1 0 7 50 -1 -1 0.000 0 0 -1 0 0 2 + 1350 2700 1350 3150 +2 1 0 1 0 7 50 -1 -1 0.000 0 0 -1 0 0 2 + 2250 2700 2250 3150 +2 1 0 1 0 7 50 -1 -1 0.000 0 0 -1 0 0 2 + 1800 2700 1800 3150 +2 2 0 1 0 7 50 -1 -1 0.000 0 0 -1 0 0 5 + 450 2700 7200 2700 7200 3150 450 3150 450 2700 +2 1 0 1 0 7 50 -1 -1 0.000 0 0 -1 0 0 2 + 5850 2700 5850 3150 +2 1 0 1 
0 7 50 -1 -1 0.000 0 0 -1 0 0 2 + 4950 2700 4950 3150 +2 1 0 1 0 7 50 -1 -1 0.000 0 0 -1 0 0 2 + 3870 4050 3870 4500 +2 1 0 1 0 7 50 -1 -1 0.000 0 0 -1 0 0 2 + 2970 4050 2970 4500 +2 1 0 1 0 7 50 -1 -1 0.000 0 0 -1 0 0 2 + 2970 2700 2970 3150 +2 1 0 1 0 7 50 -1 -1 0.000 0 0 -1 0 0 2 + 3960 2700 3960 3150 +2 3 0 0 -1 0 70 -1 2 0.000 0 0 -1 0 0 7 + 270 270 270 2205 2970 2205 2970 4725 7335 4725 7335 270 + 270 270 +3 0 1 1 0 7 30 -1 -1 4.000 0 1 0 4 + 0 0 1.00 60.00 120.00 + 540 1350 630 810 1620 810 1665 1350 + 0.000 1.000 1.000 0.000 +3 0 1 1 0 7 30 -1 -1 4.000 0 1 0 4 + 0 0 1.00 60.00 120.00 + 720 1350 810 1170 1080 1170 1170 1350 + 0.000 1.000 1.000 0.000 +3 0 1 1 0 7 30 -1 -1 4.000 0 1 0 7 + 0 0 1.00 60.00 120.00 + 495 1350 495 900 1170 450 2070 450 4950 450 5805 630 + 5850 1350 + 0.000 1.000 1.000 1.000 1.000 1.000 0.000 +3 0 1 1 0 7 30 -1 -1 4.000 0 1 0 8 + 0 0 1.00 60.00 120.00 + 450 1350 360 900 675 360 2205 315 3465 315 5670 360 + 6885 585 6795 1305 + 0.000 1.000 1.000 1.000 1.000 1.000 1.000 0.000 +3 0 1 1 0 7 30 -1 -1 4.000 0 1 0 4 + 0 0 1.00 60.00 120.00 + 810 4050 1125 3825 2565 3825 3105 4050 + 0.000 1.000 1.000 0.000 +3 0 1 1 0 7 30 -1 -1 4.000 0 1 0 4 + 0 0 1.00 60.00 120.00 + 765 4050 990 3600 4140 3600 4365 4050 + 0.000 1.000 1.000 0.000 +3 0 1 1 0 7 30 -1 -1 4.000 0 1 0 6 + 0 0 1.00 60.00 120.00 + 720 4050 1080 3510 2385 3420 4680 3420 6030 3645 6570 4050 + 0.000 1.000 1.000 1.000 1.000 0.000 +3 0 1 1 0 7 30 -1 -1 4.000 0 1 0 4 + 0 0 1.00 60.00 120.00 + 810 2700 1125 2475 2970 2475 3285 2700 + 0.000 1.000 1.000 0.000 +3 0 1 1 0 7 30 -1 -1 4.000 0 1 0 4 + 0 0 1.00 60.00 120.00 + 720 2700 945 2250 4365 2250 4680 2700 + 0.000 1.000 1.000 0.000 +3 0 1 1 0 7 30 -1 -1 4.000 0 1 0 6 + 0 0 1.00 60.00 120.00 + 630 2700 990 2160 2295 2070 4950 2070 6255 2340 6660 2700 + 0.000 1.000 1.000 1.000 1.000 0.000 +4 0 0 50 -1 16 14 0.0000 4 165 135 -585 1665 0\001 +4 0 0 50 -1 16 14 0.0000 4 165 135 -585 3015 1\001 +4 0 0 50 -1 16 12 0.0000 4 150 270 2340 1665 
i=4\001 +4 0 0 50 -1 16 12 0.0000 4 150 270 2745 1665 i=5\001 +4 0 0 50 -1 16 12 0.0000 4 150 270 1890 1665 i=3\001 +4 0 0 50 -1 16 14 0.0000 4 30 1080 3690 1620 . . . . . . . . . \001 +4 0 0 50 -1 16 12 0.0000 4 150 270 990 4365 i=4\001 +4 0 0 50 -1 16 12 0.0000 4 150 270 540 4365 i=0\001 +4 0 0 50 -1 16 14 0.0000 4 30 540 5130 4320 . . . . .\001 +4 0 0 50 -1 16 14 0.0000 4 165 135 -585 4365 2\001 +4 0 0 50 -1 16 12 0.0000 4 195 780 6075 4365 i = 4(p-1)\001 +4 0 0 50 -1 16 12 0.0000 4 150 270 990 3015 i=2\001 +4 0 0 50 -1 16 12 0.0000 4 150 270 1440 3015 i=4\001 +4 0 0 50 -1 16 12 0.0000 4 150 270 1890 3015 i=6\001 +4 0 0 50 -1 16 12 0.0000 4 150 270 540 3015 i=0\001 +4 0 0 50 -1 16 14 0.0000 4 30 540 5130 2970 . . . . .\001 +4 0 0 50 -1 16 12 0.0000 4 195 780 6075 3015 i = 2(p-1)\001 +4 0 0 50 -1 16 12 0.0000 4 150 270 540 1665 i=0\001 +4 0 0 50 -1 16 12 0.0000 4 150 270 990 1665 i=1\001 +4 0 0 50 -1 16 12 0.0000 4 150 270 1440 1665 i=2\001 +4 0 0 50 -1 16 12 0.0000 4 195 840 4005 4365 i = 2(p+2)\001 +4 0 0 50 -1 16 12 0.0000 4 195 495 3195 4365 i = 2p\001 +4 0 0 50 -1 16 14 0.0000 4 30 540 2340 2970 . . . . .\001 +4 0 0 50 -1 16 14 0.0000 4 30 960 1620 4320 . . . . . . . . \001 +4 0 0 50 -1 16 12 0.0000 4 195 390 3285 3015 i = p\001 +4 0 0 50 -1 16 12 0.0000 4 195 615 4140 3015 i = p+2\001 +4 0 0 50 -1 16 12 0.0000 4 195 555 5580 1665 i = p-2\001 +4 0 0 50 -1 16 12 0.0000 4 195 555 6480 1665 i = p-1\001 diff --git a/doc/ug/figures/correlator_scheme.pdf b/doc/ug/figures/correlator_scheme.pdf new file mode 100644 index 0000000000000000000000000000000000000000..117dd105517b07577e12721af3ccd36e3e42fc4d GIT binary patch literal 16289 zcmb`u2UJtrwlFLTQUyVfB4QMfP8uy#>AiOZAwYo80!ipqdX?TpK%|O*^de1~^rq58 z5Tti$f*|+>^_+Xox%d6={bSr@B-wkdS=TIkubH`7HDu&?z`T4UtX&&1&q(+IAOOnT zmPAw(sDf~`#@YY`a3fkkIix)nfd&BO?BQ6148j6si4Yej!C=t{xC4p%lRTfkM?8w? 
zkIl`WH~0ox#UhAemdypI@bLl!ECpW^^w3xfF+KyrC?t=4($c&v-#jmJoMy_Vt?lK4 zW-=`;#dM#&V;0+E-f1~H%xY;qU2eG%^XbPTkC?tv&F=gm>cs>!`j{Phuw2nKt{gx0 z8N&R?)mcZeY(wABDTjJbE~=ebgkvAYh2>&o15RazFTMWpIT1l zK7neMs~ls19%9x5dpqr@Pre89+h>Q{z6++Kj#dpOn`1j0^#pf!=A^q3Z_N1E+0kP9)JeRx?^r)x-VWoW zZ^eu8lo8^r9*h{R-g5Pi-I$iPt7kh6EoiNJ@jdqW9fYL5mfeW$sb7uo*bt9TCf$O! z@ujfxvz`0$u)azbcCN&N58szuKAV4kbyQX&u3c(SYEaW;;#VzPZM}`*+j`)m)-Pf7 z{Xnqx=##Je$y9BGw6?O>_pDm`G-t%yiBBQ7AReJKtImov>Z=?(s3`2x_R<_G_&O{7_1i4h z*iv3Q4!8m~D8Ik1tnU?zz;1mI+o*OTLHp5v-O*7PEGW%r$@~zXQuEbu+97yh@wq_& zkC(|sFOz_V=Qzw=m}$()cuI@4`EVa+@Pct6$n{WqiqY45DYb?4fhQM8;D9lPbkN#2 z4qO#ZfVYGY(5Q%lQzchhKO_D==huA7>35hVZZ=Uo6hPC=bh0@m@qLY=<@qNc`kPJt z0j3|^e(bmZa6i#ndRMnX&f#|yZ31#3Bb1mTTVa$|7r(Xruc5*Q{t3>+U<$isJtPOPsbvw&TT|yEzL)NZB+~%ncS<``y zi^@nkPo}f=mp(@IW7g}7)3%}?h5dS(E}_vx6*IAm->=!&rU^xwcIx*amv;is_UETs zniofC8GIAtU$cytMz^l6%-d0ir?q zdVfB&dE&>X9|QBBKAp|s;WNGvKO<)TzIorn|8*WhJfhEB3u1|!cTg~UE5ysC_U`to z`eS?P@o9p0S5V4B`8^9dnbGZQ1|wTk8p!5go*nrxQpUSAg>`0Pa23*1(t)5HTW|aZ z?NQ9#(9hpT?02kf^BGbCN}#CP_z4M;N6gNN@dDL%LTJc|1$2hY6yx30-plq~o_3gl z80F4e+!R@iALUcMaiv>HQBho~y|UQ|WUnErP-fd4Nf)LdwHR-6{Rzd*tjE`^r4<87 zS%{C{qiHxfmUO{ZSzdGgT*sp+Tvn&}vLmduEPql12zSIs{AxZe?a-^k+ zlS`i4&1<3E>E3I|&xSqLDaja;YRlGFtkH5Z*snC3&6J*pN76)SdmN4GXAFmUDlxof zIoK@XPt3v|`3^p7F~9U0h*BGNdB5^idBD_dQ@y za3VO9BT!5`xZxIos0`V6CO+R z%v%wf>8qq!(6}?3JXzmQLuj(*=Rk0C{=?vA(d)bQ6XVHab8nVrzA6s@M+>7Q`VR@e zBwX6nEdRvyqxRNoqwJ0k#*Bhj$viCq!ol+wD$q^fdC4!lEEl!bm6-3*Ert)Rv#|)_ zH{A-I^U;?W^7pT{Hk-R{{GIUlfvg+o+u5Cm-{=Qe-80fhYXtJdy?2gt0~9L4s)FO# z1uc-4o#IL#+!M@a(A5E9oL3N|U}{ZeZ@ihK3i zn-Pbjm2Uz!nMJ7BNRI>R?TG117i73@T~X5O=9%TN)+h95ZMTn|HKg^A3%@=$vgJ+_ zRm>WhDZm^UT024|koan%_wLPLt1J|rbbf8%!5bT+>Fd+}m+|Yz%DNxv-EFer3wfw& zph+}r;BrQgS8!pS9uO!o@=PFVoH%rjEk2(7xavDVs*?WUdQ~zxU?Mm;Q&{pY5q-#~ z$ZxF!I;iijGHWy?FWT#x5!b{NluO@Ka>Ll%$!l`;BYu=Bge>4y3S~Hq07`B*6^s>O z3oVQW=^4~6&?^(NAW(FeMMXe0+QfcX_8pX`tQNy|nH74q0W&@!av3i z1m9S)S90o4%x#%ieYMy;7?`rK_~z)a3wkA=1= z3#gTEaXhw_$ZSZh75%!4oQ{yx&Qw&l%XnyFiK~0qv-`6Vy 
zG6>l6u?}avGUkYsfPQ>t9AU>(e{0ej5X_GeYJ5L!g!z$5MTw2gxl8(LD9q)8+X($? zkb}}N*%dsBl$zTX0LF>Qf5A>jJHW zxz1Qs7Bg)AO?Nn^V|6OkoK%Ecb*9eZ{oUtCXsvS1FpQbLJqNuC8qeP~zKWxu<2 zA3~avJ3GL;AtT5Hv6xm#k#G%{AZruFMVi*9=7NeK-vX-Ee6CfJgRR|tt{$XWrb}Wr0y;nfisM6i$khv4rOEsipy*Z)N+A)Y}tT-8(IF$eJJ=4ffqya@-1>$hm zsOvQ403*%wBg(3iiOk~PTN4?{Fvd%FTT@k9%UpZavj5@TP=c2_O~%HIQ7#eCQ=P^e zez-KV(YjT>v73i-rC?&cb+c&sVUA9a$V4$cWwH*oc}s|!#gOf@D?5yyJQT)lP08VQ zm`D6s5&mgg&a&F+~kLVz_*23Gbm{QNsejOiq7?GDJ`IwaU z`OuS^3v+Q{cm%tkSy#==cefnjdFYA^t(SE2aR|oYl)EE8!* zVhaswto6$X=fvGRt%ioYu>=u%BtA2L=_R&{#1Xv zd0BJT{A(xnx!KL3DV8-8@x!zigbBP+L+|8lHS4m8Id;tnzG}dFr9F&0Z8Uz`3W|C4fp?gID}}V9g=ikM zr>2F5QyM?zy(VA;@~OMWWXHf4PvLQo=`}r1y<~MK67B6Ga(};|q1{~Q9_UK$Sx!jY zi`)xRt5@>*b%)}kIsFVC#4LC45|xtOMZ5EF=mxi<^Cs#{pqn%zgdET~bV$i+xUmgI z@?0*~pd*3vc6LLsldH*v*|i20HVMF>wAwa1Q$bM``om0xj}UV~0+=Mdp(zaS;*xmU zLl>fq-LIRR0@*jMUcSv3e&WuUVGot+v7$S1rn*u+VxGsCn=zx79y(A`0_!)CO#VRk zvVxgWQ)EW{l!)@Z+IT5rrZ=l;_qtOnmufQOA_!d#WmlGzq2)?=gnm0UL%T(P1J0n? z0@V`|+hD8FLAG|lZRrk$SLwn>?F+1J0TT%eEfB4E^(?n|29A65bMJ(mho{@4!$`wS zcR2EiY~F|OVCo!X&1!gKdFnHvDqbclfw}q7vxP2chof>|B3`AgQO%JGXwj-sg%ecv zmD)ziJ&I>wr164bblL_??K1E1gam-Y@WCdX*1}J z1rk>!H}WMal6nn?_6&*=;XnP@%Wb3iv`#Kv%TNLIMqMGf*oUXzIAXYQeSP^_gAn>D ztGt+X_Exz@>^G`{FRvdNrGAWl93#c|T&`g`(U+v}1r;Z+=3sWw8Y`N-x2HV!nY&s? 
z1;n>M-!NCAUOR^5hCZ6*rlmknO#)Qb%6+)kQZ`xSvdY0cC9A=ck!yNT7K4~>p7^O4 z`Ad~fz~CjiKpV8_9n(@WyrI4c4y8P%l`_?#tDu7m3Daf6tg?n-{U%Tb^%N)KV`;Sv z^&)3q8+zq9`2`xC2X8qOcpu{%O5B=zEfyYYs6tHqlq~~Ea2ztmT=FzX)~;%6ix~Uh zl0QDf#(Wl$sKWG5(bp{#sB*8JK5lsO(95~(X2u0~pc(vif{AIQ@d_>G@wgmyGHd}; zdBHGnY;9(^t>Ko&N3VCJP&d|Fo_H$>JWVe0E2)Lf{a-?9@#4Qdo#AI~Ac4Bb(d541 z`Z}aR0?GlW@_ncO@Pd1BkVKcXxmADbk_MeejxXY^X33S33sZ`;Z@7hnea%0ISs;Rir8S( zDeOT*efnz^9)ctsbHhv#91#OtOQ3LL?$?Py#l#6y{wrg1*N)EcT|Y_h)ut0NW6=bu z==~zwvP+kzCC#%qJjQbIx9*x0WM7*rz36bPB*AJ(9?GeRwO*)xnI%Zke`knx@1Ckx zIr*5+ZO2Qkv&Y`xs}I^Rz{WPID&Hi>Scx3%!$O{I71u*&$Pvv!sPtUDS1>eiJP89= z>A51MMgGnIN8uoBc&1yv@GP5SvU_YOFu8;)+#A5g^_hr3<$~;@nU*kK})h8>CWUS){9BEd3l~gy`y0TZ!FQwA>@C$$ie-;cF z!h__BwKH{>s`Q?U7U# zpYsGo=~2J#*2i}pm{EIs$&U==zn3cWvZ!D9Zm(*2N;MIk=vX8HDK+ChttQX@MZ8V>kb3us$J(tQQ(WST|Q&nUY@h!F>gL-r;eTUGxY6% zT2GAc??9Y3zHJx=u$^~fr|oz-O+DJTt@)L3Xa@fcfO;_1g*5g&QioK zu#lRd(&0SMCuT?a{A`6KQX3Dgc4&3*%{yesJ&B&W_baMd`Hs!i_e`;E)-wX4E?W_; zcaXhPClZZt(vCS4^HG-ELZ(hP!2<+DDy^^cR|h^T1nA>#GE{UHm>wDFETFA-OS_Xg ztyKn9qj|lZ1JeS+&8Ju@DpDhNU7N^C78&cw4~bSqziHg*!qf>xRksD(apH38B!?i; zeLH4RsYQ8%`=Camh=>wsl_n(p!fNZi6{F?O3)hu{tPLwU$Gq&~9xn)B1=01v3GBAc z6I`~GbB8rhXSdmql}(m5^;TOCxS^h5x0%KSo22cd2lVer&82}CwkbwJoo*PqsDCOx z5>4B=bd`N!MUfqCSx6kY9eR?(-FBgktiJX^)=~;_ND;O@F40u^8dFV!;4Mh@Fm2!> zRKxyqpmj{-#UhOM@W4CEz;U5r$hCA$i=koC{La_z)JHEAg00MtDxlDZ-#9GFA{U_^ zV_$YnNAO4AfK!)Ci9dF?`qL^;dcH5fQ#WJo+DMlj$b0pub)|6m)AcH1`OM%SoqL8a zA}*30KW$(rc`LYV;{PSzJ%)_XvJ8gkCxtFWZoV$Zs>_V>@3qF=cV14=<+@3wr_rFE z`hHv`O4QwmSV-@PtZJ&nXsgKkog0CkdwfJbWXIJ#-a-jto%>kWVUAi+^z_YhJ3P-2 zMmp<@jzI3@On52rx|E`OO`{f(N!n!1`isvA96g*{1FW*)1$=cdWzF|V)#@3N(#n7WX*0pBCK^IRGj`LMV0n) zlwL@!9I)x5u5azu?jSdFeXTE7cG9}@<>FPQ)xgPavwOv(wz}3xem|@3x>%*J-KMOv zc!a2JSY^(7)~>TsiJ&gS9t+!$YFq*N&956eF}fF?-}E=@ze%YD90uK`E_cSiPO(GZ zbzA7etOa*U2BCN>@I6f_Xw5ND;~}iDQC%>vhX7RH zd*>s0KuAWqO9ObN{N`Bw+l>Nl(;WSmi56pJi~>#Z6_cw9I^Zl%(8HoIti+QbrGkvJ zyos=U#W3miSG9#f%IypkheY0zhG!3vZk{EIQ=AJ|J8t+NH 
z{HPs}5vfKJ^w4Y6tV{eEqM0$GO@_gOCgS}IpfG|G>{3?5Ok9`V{iI+b;+cm&UxQZo z7(LBPGJH0+x1D=F79M=tfSN`w?=oo()11ezE29z@7pBLvhcmSAxKk|60rYOTbG1(` ziYc2$)azL8DE*MgTfP-`AhLBzx+@@^#pcfLS8fnzK(`!nk@G>`0q`A)j*D(F$e)e+ zo&Ph}m)+lGq!yOksI9(Y4^B#(SsgImfp^{t#0x1O3Ud)H@eVRXwTdT}I@@+W!N%vx zji^&wAbIy*UCQm*qu zQ^WG^)(g)ItrCx{RP`F4XHdGtLXeU1^!ml_g+-pGnt;0aDif*}n<^FAk+kYJ4<6RO znrJH8_$X_r-CBNM%is&Lla+TiW0$ryo)|XM)7Vw_fJK#m*zJ?j`$P-(S5>JSsR4If zMC-=+l#$n!8I(13YRVriQZ?|d8@z2h(Ox%prAuK_X1K3i7&ZE6lS}7W3J))*UncHi zxt3ywlI4);)}ughV*B?*_f?ag�{Al4#;VYG>9w=_JB1t2K^+(b_+hEe5P41{ILX zqt=Hot&EPxU4~Q0*b~trtH3%;hN08NQK^s1My#wan5<1H6}FBEC=k?(pHM`aB@~j| z0rW|H1HSj-ibnX_?!Ay|?g|0kwU+m=@)y8Vhk-an>^r zVpq!zax(<9lC6VP`({9cgHgvpE4EQ((fLA~;Tw(nk`4_cqr!MsdzaJVk62_&&w_(Q zWD^bF%?&^>`9lbp+4RG^qX3!Jhee6E4NhFEZ_?eVpCoZHv z5fN!_p5TJ$g{={A_aw)&pOp4534LNrMz~-N!Gcqf)fH>WG}0L5=A!#TcOLtVnwLKa z+BCo%aB-}bMmB$#`?l3_={4UgY3gkuWi1~HrLSUxWHBNa%-n*_ORm362?z#D3MYV= ztg=6^=P{UdcPz(SwsZIB_whOx!+k57l^!d!v;{nB;Q02~^rANBnS1E_g=!ir%DPjx zPZ<(P81Kxh5?1#l29}$wd$jh;2a}#cy-R5<=!1s5V2WROveq|8JG+vHS(BwU9Jq=} zO}hpI+^tNDnobZr@`uXOQFHYI9(fqzu&z=}LG|J@| zdgKPsGr2Sk$?DKWDtZ?Ck5@*rTrZ*W=ur9`k!Kp;SFE>NsFmvodB-;W4&nd{`65EW zl%1yZfarQ-^8~2~P0S3EzN>L>uYu@Z;Zw%DmskJpQ3-}M#qcFTaUedQVcP~*}@1n=(m%uzD^bUqH$F?sijTW1{FdJw=vL*xZ{{F4!v?_uPHxpgQ{cucVEOdq|WafA&b4T?a>@$9Y0#V}% zw>Vi~TvH>%E=#NL_8?b(=#0*NqLF3q)@e6OW)Lf>mk?Pr^{eOi@LMN1M!}UnztgGc zfN{ziU!s5Rv^<_D@&n2Rmq~zX4TorJnE3~ znjiGl%!2-rd_<-9r-#^g2^Bwy17i3KI$y|aWuQ!JJ&f)(m)4uy#Q4340I!EOJI(R} z0>wr7#%trCk>e9vANc$|V7E8dPWni-O{02L)Z|s~NJpo|J1lyQ(dI*56r7$JvpnsP zZ7hr50;TuBZ$Rwu-aC;8R!D>bmW4Z)uc}-flwQBGhkhMz>AC2;G(Zkev}zP8mHTF$ z1>tJbPRt7{l!h3mr8+*ddS`c7n(BDx)h_3gu4|v?3DaU}QXZ$eE?$`4$q5?a5rcle zL+WiBkJX(MBJ5$fzLYheNshW9ApRNOKYokX7R4{hI1P`@ZIEaOf^_+z#`2OSp%gDW zLsC>u?(~psC$PT5SDTr(Yt>EM>}dqXXS@j7?&~gKi>Bj;L#J!j`-);?gS1Vn?L6)b629fJo{!}NH6&Ei@Miho{x``Oj!mp)a6iixp5J| zS=Q{&NW7DSn_r5eMmgwTb#jK|cPx7UaO7B|o31%?9$!twCWU`Ax&IaTa{7`$K7mg) zZ@AywT;}0fTYHnhaa+64b?W8_$Bvmu$Ne-c^`Rd9O=P8)V6+IoMZ1_}`>SDJ&S3Dx 
z5h>@-uG$#XwiYk5M|O2R_0=~cb8hQ}an6s>h2>xY)m@c92Gd}Jb*;0y0v)W05@AQA zUd^{@>y?Th;~aqwFLwjosl1FGwZxvO*?I3|v)uv<%%E(*JS;kBAHB?i*cnRNkNcIz zstM-3_e{X-a!;&PB$~HlFHzB}=(NA+)g5>eY3k1~IUd9GL~2?^*pAO*vq7aeol5R& z9bx>DbN6E5ofHB`(X=6*b)u@Z%zlmH5wSg-r{=UgtUg6n zCpYlx>sE{Bu553UcH2>wmo~PVP!Q(sN-Nt3(-!TYL=oF}-KG-Q%@JFqrhU3!BH!|; zGe(A6(>HQTGAd59)(A~k_+7!y`OUU|Nn%hOEmJ|xZjPkr94)+%&#v{G4;V>BL(JG} z%1QC4u1A51^f{;RwN(mI14TR4@10+7a`^T{e9YZaEO$+KFxKhG`OZJ8Bi8YVmHh!w zw5^0r#(4}^(|*qBqG99IaR^MsPb0zKYEHw?PFMKR37jf_|B}o7)8`kgjbtm?X`kI@ zxPbWXnXj2MlQa<&g;%;*rqkE<%#M--HA5#>&^+b{d0+KqiJYlh;kzQdr>E)35p&Z= z0>|U`+fLp)KNJ_*WBo>u<(^ycT%o|Pd~cnHLyWQfNrQZ~xYFm6A=dOB)7#Z_if>_DC6jHle<$@cLp+Ccd` zO&dqaA4Qte=SMrAmfY==kva z_}s0hJ-DVAwVF9?r#{X}dkVa_wfgM`o58`8&#x&%|UisafMEUSKFTl1`ag{}l&&8yMC(!SvsPI@U z<3QY`Oz`5?ENj8pWRN)d!l_xy;aWYK3{JX5=}q_UN?liX?Sd~*W7&H<<>D4NJ(e+@ zlJ2V}+Wrk2)veKT&h6vl3w2#AhkY-VfApA1xbFG_y!U=w)TYXvML1giZWhBGoi~`B zx0Rj$!UP4NB)`Y_{?&Y@u2iynYrum^l0EiISgakK$0f0IPcPtKE)*UOajm0&u0l<)7C~_zq zI`|hpu9pocjXTFt!~o8F;Iv3^L%+p32l>?#hogp*3-D7g3;?K%w8Q}JlbnmKMPdTL zQE)jY`4?31pQyhvrQuk(J<9qI62GNaMPT8UxETOg=pVcV|4!gvYLEccHBzsf8gd*YR7XbWEpTAQ0g$E+}H6{o-cj*84 z{tfXT=gu+yGJnqZ7aZg_z4KoooSeTU`Y9dgr{w&gUy!)pzgh98(*ge{EB>1+|F-`B z4=eKV109f#E||ZV@u&O!)j9s|otWx;gCsyiny3YT1Yxt+aqvm3e?6T9P|JJ z5E7s&5`)3{_fKB{K={we;KBlzo7m5tk>jUuxXxMx6afC2ME>QSKhJ-w^{e;xudTN@ z9)ArL=|6gP`5_>2@jsh#qvM9`u1QjNS;y1SMjsHmkLZ2Uyp1nvv#I)=LCNCEl|1+O zo-KO_wxY6MMFH1vLfPT)3@~b&8~iy&ul1G=MFefD^7gTM;!I^8t@SlqQBN=Er$e8t z!%I0bWbmj%+O?!-vzn>I>Yts2u9J5KTDjkDY(e2KCX0l&#Zi-J?EsS88!?{^j@E4I zJcW#5Zx02(Ts(_t8B!UNlB|0r$D-O^_hmgksh43eIyzdaoLp|`-b1Zn1PSjv^&8Xz zN=*eZr{ZdRx#E3Jyi>P+>5O7lZOt$W$a1snoRzt1c`}Hm=FI?`@N)2Lgjfxa7cjyHZGm@{%2><9m|bDU(au zO4y6kILW%Z?+%zUhA)GQ%pV}{sR!n=-npr! 
z`_i#0snz?-(EE-g`uZ!{p2Q{Z>`)%fSO$Vi?90v9;jiw{-H&id!zfQ+2)}sRj zN{ysNmi6OXAI<`#{4T^;?VO%x(Z64M&$HP7C1U<#HvA)KAh?JT6o3E(`M`ff4HW0` zze5H$`qz;8dHQF_;86dEm;r*##t#H?W zAQET~boiAcfQ~>E5aoyfIsu)|Zx!ru#mQes=<_fJIw8K)DlxI;jFYXyX}6XWpO%v!Oo5CyueO6K z=lNbiRG$m5O*wvxjHGie53CF7>@X9vDv9NmT!YSI%CALcFxt^71MAyPJ-%duV;OKIY3xniwwZ!hkKc`V`M;Okmu-ITJ~N_?G# zbp79FXSX!AoO`Podjy7bfkc&** z0&dcto}_s~Kk`>PNfy(Fxs|bn-_g9T{snKlJf#dzAXx31gokd>mqzA*el}jkUjL+? zQ^2KhiL}g~j1m#$(2kzl`3LlN2Q=ko{=*lp5qFBaXYjJZwJkGg?^c4HZQXO&ikMel zJR>FpyRUV^6y{Fr+E+uXPRNdf+)JqTERh$!JPp7nRhTNIm z*~{=`&!Gov4wdN}AH>YB%FW(!gXWdw-qhC5j#!#bocwea6!3>WUS{_4$Ctvd3ky~ zGCl|vbI?9`*QdIeabnk3_C}0(elvQCuV66AgHgz5VZY&uH_@SZ+EfOzNFfONTW8`ZqgzQzFndDRrUnVEH}DCcE($8JPB!d2<;tr$^)l) zI1ACBL`>Z=L!)k`Oik_jZlo<4ji2}fzGatA=qA~y6tc<$Z~at6JNCV>;KBO%58X8)>d9MzhUq3nuD#M)le4B%w_GKT zb*?Q-`c{c-EVUnhRCU!hzT=aL?qU;e;0Cu#kH`6lUzeXRHko?gSiEVcsqyvPhK<|{eem?|l?R~(^m}}y_wxyWFOh?T4eESpsY_rR zI*Yjqev^ryYh)tOiqFcYVCVUsIlMcU{Jz|LCRTY%)YUKjn{`Y^Efuf}{$N#TX-b4@9RgPk~XkJs)BpWDlL@H7>+% zjqk{RqbHx;lhM(pzC_)5L57H>?II;G%RK`1Ja?g-$FM?+#4VF3H#N|FCB9MVgMGjS zD07H5pX|PzJ-Km_L$hV9o&8BWz31DGOG(RbJKS5s`fpr9@NFk3c9BLmh%0|n61=zf z_3`7&rW(cGlGh8C8HcxHO+(0ttIj&+vyl@*w&sq$mj?Kb8+1Cn9-4lVzN+`xMzzM% ztDlLJ3oikdRgBUyRrk$>PWq`OXahnCB!|MdJ?%6f%RL}yS#jUF=^K=ek$AMY>W57F zvhVoJ&ndvx!NB{lZn0Gwx>+OK&^$Tgrx85fDnW4Hqsm~A$Hn{LOmWpGkee~LWJ&eo zdzoOxt9M&XWcX#P#|MM`^)DBWE@WIINs#To5+9QV#R z=gQu{ z7y$0>U+j*wmWte+a{zZ?cLyg2+#Vj_?qKhT5q1}2`-viqJ3b!qQQQS~yI{=thfDK0yZD}Q}g)3kDB8WQ^W3$0xorHlv zH#aw4Hz+R(Z4Cs&U@#yE0)#+#a1cBg4@WHAoyQTwjN|uH5TwDMXpR`(pEB`Upd5fm z10a|e1pI^1AG>)Z$`NqR7;cVo!HTi*gP`VMI2^_!00Il}fWZh09uV$@`!a{{Lm+|> zkbux1oSgn5;on%{CUCr2Pbh6+I528F@?DkK1ecm+Xhz~7qx z7X-pt;e4nU+jO<_>Ce8JU7!{R_VXEZg3}6)&K96b8t9!=yNz>8I*;K18yIzDD!`Gm;a)L zEiL{O)5!&G|1%;iEr1An#JNNmoRPseIV~-Otx#wOIL@EpPENROIvk5zWFXEN*~CS0 zlUO9y9wGiS-X!g@zYgq?=kX(K4|lW{V{_-RL|DOH?6GX(7AQ0tfqMWJg*L@l*x=ak z;%xWJ7WPQ-Uk5*l0RKf8cVuzyCn)qU+S&-`KbVNyp)e>{xZ^#zJ+4LtFlN_4oIhi$ 
z{A<*h;~$iNVxCj}liq*uf}caU#DQ}N;GbzpoCNS+dj0nZ{VNya_NQWOZs(cy-*&3! zg{|MI{*TPT{+|*D2X0$WVI3xv+O zL$LoxQ~$vRg=YU>gBAxX>4LSvf&MR6=J>OO>yAcP;VN=q7|D5>{q;I8MM3!iR)Ak) zU=ZlMo_PKOIQ}ul&kx4c*8V;Q0)cVc{J)KXU_t`_#Ki%CaXZ+5!twnR4h+IwNdEx` z2Js2~lLr`#E9d_k9EAU$Jis6rAFkr{H@-L?=k?Hkj6u#TVSgV3gZclBF9ZbpcOHD8 zf0spw|KHY%54Q;a;( Date: Tue, 14 Feb 2012 15:52:49 +0100 Subject: [PATCH 3/3] Further changes to UG of the correlator. --- doc/ug/analysis.tex | 78 ++++++++++++- doc/ug/bibliography.bib | 248 +++++++++++++++++++++++++++------------- 2 files changed, 240 insertions(+), 86 deletions(-) diff --git a/doc/ug/analysis.tex b/doc/ug/analysis.tex index 7429624e137..d5b10e3d3cb 100644 --- a/doc/ug/analysis.tex +++ b/doc/ug/analysis.tex @@ -17,6 +17,8 @@ % You should have received a copy of the GNU General Public License % along with this program. If not, see . % +\newcommand{\taumax}{\tau_{\mathrm{max}}} +\newcommand{\taumin}{\tau_{\mathrm{min}}} \chapter{Analysis} \label{chap:analysis} \index{analysis} @@ -971,6 +973,7 @@ \subsection{Introduction to the concept} In general, a correlation function is any function of the form \begin{equation} C(\tau) = \left\,, +\label{eq:CtauDef} \end{equation} where $t$ is time, $\tau$ is the time difference between the moments when the observables $A$ and $B$ were measured and $\otimes$ is an @@ -1288,8 +1291,8 @@ \subsubsection{Multiple tau correlator} For a more detailed description and discussion of its behaviour with respect to statistical and systematic errores, please read the cited literature. This type of correlator has been in use for years in the analysis of -dynamic light scattering~\cite{dls}. About a decade later it has been -re-invented for the Fluorescence Correlation Spectroscopy (FCS)~\cite{fcs}. +dynamic light scattering~\cite{schatzel88a}. About a decade later it found its way +to the Fluorescence Correlation Spectroscopy (FCS)~\cite{magatti01a}. Despite its obvious advantages, has been used scarcely by the simulation community. 
Even a detailed description in the book of Frenkel and Smit~\cite{frenkel02b}
for the special case of the velocity autocorrelation function did not really
@@ -1305,8 +1308,75 @@ \subsubsection{Multiple tau correlator}
Let us consider a set of $N$ observable values as schematically shown
in Figures~\ref{fig:dataSet}, where a value of index $i$ was measured
-in time $i\delta t$. To simplify the notation, we further drop $\delta t$
-when refering to observables.
+in time $i\delta t$. We are interested in computing the correlation
+function according to Equation~\ref{eq:CtauDef} for a range of lag times
+$\tau = (i-j)\delta t$ between the measurements $i$ and $j$.
+To simplify the notation, we further drop $\delta t$
+when referring to observables and lag times.
+
+The trivial implementation takes all possible pairs of values
+corresponding to lag times $\tau \in [\taumin:\taumax]$.
+Without loss of generality, let us further consider $\taumin=0$.
+The computational effort for such an algorithm scales
+as ${\cal O} \bigl(\taumax^2\bigr)$.
+As a rule of thumb, this is feasible if $\taumax < 10^3$.
+The multiple tau correlator provides a solution to compute the
+correlation functions for arbitrary range of the lag times by
+coarse-graining the high $\tau$ values. It applies the naive algorithm
+to a relatively small range of lag times $\tau \in [0:p-1]$. This we refer
+to as compression level 0. To compute the correlations for lag times
+$\tau \in [p:2(p-1)]$, the original data are first coarse-grained, so
+that $m$ values of the original data are compressed to produce a single
+data point in the higher compression level. Thus the lag time between
+the neighbouring values in the higher compression level increases
+by a factor of $m$, while the number of stored values decreases by
+the same factor and the number of correlation operations at this level
+reduces by a factor of $m^2$. 
Correlations for lag times
+$\tau \in [2p:4(p-1)]$ are computed at compression level 2, which is created
+in an analogous manner from level 1. This can continue hierarchically
+up to an arbitrary level for which enough data is available. Due to the
+hierarchical reduction of the data, the algorithm scales as
+${\cal O} \bigl( p^2 \log(\taumax) \bigr)$. Thus an additional order
+of magnitude in $\taumax$ costs just a constant extra effort.
+
+The speedup is gained at the expense of statistical accuracy.
+The loss of accuracy occurs at the compression step.
+In principle one can use any value of $m$ and $p$ to tune the algorithm
+performance. However, it turns out that using a high $m$ dilutes the
+data at high $\tau$. Therefore $m=2$ is hardcoded in the \es correlator
+and cannot be modified by the user. The value of $p$ remains an adjustable
+parameter which can be modified by the user by setting \lit{tau_lin}
+when defining a correlation. In general, one should choose $p \gg m$
+to avoid loss of statistical accuracy. Choosing $p=16$ seems to be
+safe but it may depend on the properties of the analyzed
+correlation functions. A detailed analysis has been performed
+in Ref.~\cite{ramirez10a}.
+
+The choice of the compression function also influences the statistical
+accuracy and can even lead to systematic errors. The default compression
+function is \lit{discard2} which discards the second of the compressed
+values and pushes the first one to the higher level. This is robust and
+can be applied universally to any combination of observables and
+correlation operation. On the other hand, it reduces the
+statistical accuracy as the compression level increases.
+In many cases, the \lit{average} compression operation
+can be applied, which averages the two neighbouring values
+and the average then enters the higher level, preserving
+almost the full statistical accuracy of the original data. 
+In general, whether averaging can be safely used or not depends on the
+properties of the difference
+\begin{equation}
+\frac{1}{2} (A_i \otimes B_{i+p} + A_{i+1} \otimes B_{i+p+1} ) -
+\frac{1}{2} (A_i + A_{i+1} ) \otimes \frac{1}{2} (B_{i+p} + B_{i+p+1})
+\label{eq:difference}
+\end{equation}
+For example in the case of velocity autocorrelation function, the
+above-mentioned difference has a small value and a random sign, \ie\
+different contributions cancel each other. On the other hand, in the
+case of the mean square displacement the difference is always positive,
+resulting in a non-negligible systematic error. A more general
+discussion is presented in Ref.~\cite{ramirez10a}.
+
 \section{\lit{uwerr}: Computing statistical errors in time series}
 \label{sec:uwerr}
diff --git a/doc/ug/bibliography.bib b/doc/ug/bibliography.bib
index d15645321bc..927a321162c 100644
--- a/doc/ug/bibliography.bib
+++ b/doc/ug/bibliography.bib
@@ -1,4 +1,4 @@
-% This file was created with JabRef 2.6.
+% This file was created with JabRef 2.7.
 % Encoding: ISO8859_1
 
 @ARTICLE{andersen80a,
@@ -57,6 +57,35 @@ @ARTICLE{andersen83a
   pages = {24--34}
 }
 
+@ARTICLE{arnold02a,
+  author = {Axel Arnold and Christian Holm},
+  title = {{MMM2D}: A fast and accurate summation method for electrostatic
+	interactions in 2D slab geometries},
+  journal = {Comput. Phys. Commun.},
+  year = {2002},
+  volume = {148},
+  pages = {327--348},
+  number = {3},
+  month = {1 } # nov,
+  eprint = {cond-mat/0202265},
+  file = {arnold02a.pdf:arnold02a.pdf:PDF},
+  owner = {olenz},
+  timestamp = {2007.06.13}
+}
+
+@ARTICLE{arnold02b,
+  author = {Axel Arnold and Christian Holm},
+  title = {A novel method for calculating electrostatic interactions in 2{D}
+	periodic slab geometries},
+  journal = {Chem. Phys. 
Lett.}, + year = {2002}, + volume = {354}, + pages = {324--330}, + file = {arnold02b.pdf:arnold02b.pdf:PDF}, + owner = {olenz}, + timestamp = {2007.06.13} +} + @ARTICLE{arnold02c, author = {Axel Arnold and Jason {de Joannis} and Christian Holm}, title = {{Electrostatics in Periodic Slab Geometries I}}, @@ -98,35 +127,6 @@ @ARTICLE{arnold05b timestamp = {2007.06.13} } -@ARTICLE{arnold02a, - author = {Axel Arnold and Christian Holm}, - title = {{MMM2D}: A fast and accurate summation methodlimnb for electrostatic - interactions in 2D slab geometries}, - journal = {Comput. Phys. Commun.}, - year = {2002}, - volume = {148}, - pages = {327--348}, - number = {3}, - month = {1 } # nov, - eprint = {cond-mat/0202265}, - file = {arnold02a.pdf:arnold02a.pdf:PDF}, - owner = {olenz}, - timestamp = {2007.06.13} -} - -@ARTICLE{arnold02b, - author = {Axel Arnold and Christian Holm}, - title = {A novel method for calculating electrostatic interactions in 2{D} - periodic slab geometries}, - journal = {Chem. Phys. Lett.}, - year = {2002}, - volume = {354}, - pages = {324--330}, - file = {arnold02b.pdf:arnold02b.pdf:PDF}, - owner = {olenz}, - timestamp = {2007.06.13} -} - @ARTICLE{berendsen84a, author = {H. J. C. Berendsen and J. P. M. Postma and W. F. van Gunsteren and A. DiNola and J. R. Haak}, @@ -162,6 +162,16 @@ @ARTICLE{cerda08a timestamp = {2009.01.09} } +@INBOOK{deserno00, + chapter = {How to mesh up {E}wald sums.}, + title = {Molecular Dynamics on Parallel Computers}, + publisher = {World Scientific, Singapore}, + year = {2000}, + author = {M. Deserno and C. Holm and H. J. Limbach}, + owner = {RDLimbacHJ}, + timestamp = {2007.08.29} +} + @PHDTHESIS{deserno00a, author = {M. 
Deserno}, title = {Counterion condensation for rigid linear polyelectrolytes}, @@ -193,16 +203,6 @@ @ARTICLE{deserno98a timestamp = {2007.08.29} } -@INBOOK{deserno00, - chapter = {How to mesh up {E}wald sums.}, - title = {Molecular Dynamics on Parallel Computers}, - publisher = {World Scientific, Singapore}, - year = {2000}, - author = {M. Deserno and C. Holm and H. J. Limbach}, - owner = {RDLimbacHJ}, - timestamp = {2007.08.29} -} - @BOOK{doi86a, title = {The theory of polymer dynamics}, publisher = {Oxford Science Publications}, @@ -331,6 +331,15 @@ @ARTICLE{kremer90a timestamp = {2007.06.15} } +@PHDTHESIS{limbach01, + author = {Hans J{\"o}rg Limbach}, + title = {Struktur und Eigenschaften von Polyelektrolyten im schlechten L{\"o}sungsmittel}, + school = {Universit{\"a}t Mainz}, + year = {2001}, + owner = {RDLimbacHJ}, + timestamp = {2007.08.29} +} + @ARTICLE{limbach03a, author = {H. J. Limbach and C. Holm}, title = {Single-Chain Properties of Polyelectrolytes in Poor Solvent}, @@ -362,13 +371,37 @@ @ARTICLE{limbach03a timestamp = {2007.06.13} } -@PHDTHESIS{limbach01, - author = {Hans J{\"o}rg Limbach}, - title = {Struktur und Eigenschaften von Polyelektrolyten im schlechten L{\"o}sungsmittel}, - school = {Universit{\"a}t Mainz}, +@ARTICLE{magatti01a, + author = {Magatti, D and Ferri, F}, + title = {Fast multi-tau real-time software correlator for dynamic light scattering}, + journal = {Applied Optics}, year = {2001}, - owner = {RDLimbacHJ}, - timestamp = {2007.08.29} + volume = {40}, + pages = {4011-4021}, + number = {{24}}, + month = {{AUG 20}}, + abstract = {{We present a PC-based multi-tau software correlator suitable for + processing dynamic light-scattering data. The correlator is based + on a simple algorithm that was developed with the graphical programming + language LabVIEW, according to which the incoming data are processed + on line without any storage on the hard disk. 
By use of a standard + photon-counting unit, a National Instruments Model 6602-PCI timer-counter, + and a 550-MHz Pentium III personal computer, correlation functions + can be worked out in full real-time overtime scales of similar to5 + mus and in batch processing down to time scales of similar to 300 + ns. The latter limit is imposed by the speed of data transfer between + the counter and the PC's memory and thus is prone to be progressively + reduced with future technological development. Testing of the correlator + and evaluation of its performances were carried out by use of dilute + solutions of calibrated polystyrene spheres. Our results indicate + that the correlation functions are determined with such precision + that the corresponding particle diameters can be recovered to within + an accuracy of a few percent rms. (C) 2001 Optical Society of America.}}, + doi = {{10.1364/AO.40.004011}}, + issn = {{0003-6935}}, + owner = {kosovan}, + timestamp = {2012.02.14}, + unique-id = {{ISI:000170543300006}} } @ARTICLE{maggs02a, @@ -449,6 +482,17 @@ @ARTICLE{poblete10 number = {11} } +@ARTICLE{praprotnik05, + author = {Matej Praprotnik and Luigi {Delle Site} and Kurt Kremer}, + title = {Adaptive resolution molecular-dynamics simulation: Changing the degrees + of freedom on the fly}, + journal = {The Journal of Chemical Physics}, + year = {2005}, + volume = {123}, + pages = {224106--14}, + number = {22} +} + @ARTICLE{praprotnik08, author = {Matej Praprotnik and Luigi {Delle Site} and Kurt Kremer}, title = {Multiscale Simulation of Soft Matter: From Scale Bridging to Adaptive @@ -460,15 +504,38 @@ @ARTICLE{praprotnik08 number = {1} } -@ARTICLE{praprotnik05, - author = {Matej Praprotnik and Luigi {Delle Site} and Kurt Kremer}, - title = {Adaptive resolution molecular-dynamics simulation: Changing the degrees - of freedom on the fly}, - journal = {The Journal of Chemical Physics}, - year = {2005}, - volume = {123}, - pages = {224106--14}, - number = {22} 
+@ARTICLE{ramirez10a, + author = {Ramirez, Jorge and Sukumaran, Sathish K. and Vorselaars, Bart and + Likhtman, Alexei E.}, + title = {Efficient on the fly calculation of time correlation functions in + computer simulations}, + journal = {J. Chem. Phys.}, + year = {2010}, + volume = {133}, + pages = {154103}, + number = {{15}}, + month = {{OCT 21}}, + abstract = {{Time correlation functions yield profound information about the dynamics + of a physical system and hence are frequently calculated in computer + simulations. For systems whose dynamics span a wide range of time, + currently used methods require significant computer time and memory. + In this paper, we discuss the multiple-tau correlator method for + the efficient calculation of accurate time correlation functions + on the fly during computer simulations. The multiple-tau correlator + is efficacious in terms of computational requirements and can be + tuned to the desired level of accuracy. Further, we derive estimates + for the error arising from the use of the multiple-tau correlator + and extend it for use in the calculation of mean-square particle + displacements and dynamic structure factors. The method described + here, in hardware implementation, is routinely used in light scattering + experiments but has not yet found widespread use in computer simulations. + (C) 2010 American Institute of Physics. {[}doi:10.1063/1.3491098]}}, + article-number = {{154103}}, + doi = {{10.1063/1.3491098}}, + issn = {{0021-9606}}, + owner = {kosovan}, + timestamp = {2012.02.14}, + unique-id = {{ISI:000283359300008}} } @BOOK{rubinstein03a, @@ -481,6 +548,23 @@ @BOOK{rubinstein03a timestamp = {2011.01.27} } +@ARTICLE{schatzel88a, + author = {Sch\"atzel, K. and Drewel, M. 
and Stimac, S}, + title = {Photon-correlation Measurements at Large Lag Times - Improving Statistical + Accuracy}, + journal = {Journal of Modern Optics}, + year = {1988}, + volume = {35}, + pages = {711-718}, + number = {{4}}, + month = {{APR}}, + doi = {{10.1080/09500348814550731}}, + issn = {{0950-0340}}, + owner = {kosovan}, + timestamp = {2012.02.14}, + unique-id = {{ISI:A1988N368800009}} +} + @ARTICLE{schmitz00a, author = {Heiko Schmitz and Florian Muller-Plathe}, title = {Calculation of the lifetime of positronium in polymers via molecular @@ -520,13 +604,13 @@ @ARTICLE{smith81a timestamp = {2007.06.13} } -@BOOK{succi01a, - title = {The lattice Boltzmann equation for fluid dynamics and beyond}, - publisher = {Oxford University Press, USA}, +@ARTICLE{soddeman01a, + author = {T. Soddemann and B. D\"unweg and K. Kremer}, + title = {A generic computer model for amphiphilic systems}, + journal = {Eur. Phys. J. E}, year = {2001}, - author = {Succi, S.}, - owner = {georg}, - timestamp = {2011.07.22} + volume = {6}, + pages = {409} } @ARTICLE{soddeman03a, @@ -539,15 +623,6 @@ @ARTICLE{soddeman03a pages = {046702} } -@ARTICLE{soddeman01a, - author = {T. Soddemann and B. D\"unweg and K. Kremer}, - title = {A generic computer model for amphiphilic systems}, - journal = {Eur. Phys. J. E}, - year = {2001}, - volume = {6}, - pages = {409} -} - @PHDTHESIS{strebel99a, author = {R. Strebel}, title = {{Pieces of software for the Coulombic $m$ body problem}}, @@ -561,8 +636,17 @@ @PHDTHESIS{strebel99a url = {http://e-collection.ethbib.ethz.ch/show?type=diss\&nr=13504} } +@BOOK{succi01a, + title = {The lattice Boltzmann equation for fluid dynamics and beyond}, + publisher = {Oxford University Press, USA}, + year = {2001}, + author = {Succi, S.}, + owner = {georg}, + timestamp = {2011.07.22} +} + @ARTICLE{thompson09a, - author = {Thompson, A.~ P. and Plimpton, S.~ J. and Mattson, W.}, + author = {Thompson, A.~ P. and Plimpton, S.~ J. 
and Mattson, W.}, title = {General formulation of pressure and stress tensor for arbitrary many-body interaction potentials under periodic boundary conditions}, journal = {Journal of Chemical Physics}, @@ -573,6 +657,17 @@ @ARTICLE{thompson09a timestamp = {2011.05.25} } +@ARTICLE{tyagi07a, + author = {S. Tyagi and A. Arnold and C. Holm}, + title = {{ICMMM2D}: An accurate method to include planar dielectric interfaces + via image charge summation}, + journal = {J. Chem. Phys.}, + year = {2007}, + volume = {127}, + pages = {154723}, + timestamp = {2009.03.16} +} + @ARTICLE{tyagi08a, author = {Sandeep Tyagi and Axel Arnold and Christian Holm}, title = {Electrostatic layer correction with image charges: A linear scaling @@ -595,17 +690,6 @@ @ARTICLE{tyagi08a timestamp = {2009.03.16} } -@ARTICLE{tyagi07a, - author = {S. Tyagi and A. Arnold and C. Holm}, - title = {{ICMMM2D}: An accurate method to include planar dielectric interfaces - via image charge summation}, - journal = {J. Chem. Phys.}, - year = {2007}, - volume = {127}, - pages = {154723}, - timestamp = {2009.03.16} -} - @ARTICLE{wolff04a, author = {Ulli Wolff}, title = {Monte Carlo errors with less errors},