# # patch "ChangeLog" # from [acc5ad76d8f930a7565d577698b28d5e2ef3d341] # to [cb4ecce48bf00235d35e542a8939c79b891e0e82] # # patch "tests/t_dump_load.at" # from [168c3798cf11afd072b480f26a3bd23c1d9fc597] # to [5732629f748a3a18612b87e708c3cb85ac918e87] # # patch "tests/t_netsync_largish_file.at" # from [8755303c05b12e8425240a7e2afdab6aac8c1c09] # to [378eb7ed343ad9a0e8caef98f96779d7ce2dc682] # # patch "testsuite.at" # from [a5d2d19404e6806e02418c1d4fad1a511f64ec6b] # to [7f5b300d0781d438772a16ec40edb7364ea5f089] # ======================================================================== --- ChangeLog acc5ad76d8f930a7565d577698b28d5e2ef3d341 +++ ChangeLog cb4ecce48bf00235d35e542a8939c79b891e0e82 @@ -1,3 +1,13 @@ +2005-11-27 Matthew Gregan + + * tests/t_netsync_largish_file.at: Move LARGISH_FILE_CREATE + definition from here... + * testsuite.at: ...to here. + * tests/t_dump_load.at: Use LARGISH_FILE_CREATE to create a 1MB + file so that the db dump output is larger than a few kB. This is + needed to catch the iostreams-returning-EOF-early bogosity under + MinGW when sync_with_stdio(false) has been called on the iostream. + 2005-11-27 Matt Johnston * Makefile.am, configure.ac, pch.hh: add --enable-pch configure ======================================================================== --- tests/t_dump_load.at 168c3798cf11afd072b480f26a3bd23c1d9fc597 +++ tests/t_dump_load.at 5732629f748a3a18612b87e708c3cb85ac918e87 @@ -1,6 +1,8 @@ AT_SETUP([database dump/load]) MONOTONE_SETUP +LARGISH_FILE_CREATE(largish, 1) + AT_CHECK((echo foo; echo foo) | MONOTONE genkey foo, [], [ignore], [ignore]) ADD_FILE(testfile1, [blah balh ]) @@ -9,6 +11,10 @@ ]) ADD_FILE(testfile2, [foo foo ]) + +# include a largish file in the dump, so we can test for iostream breakage on +# MinGW wrt sync_with_stdio(). +AT_CHECK(MONOTONE add largish, [], [ignore], [ignore]) COMMIT(branch2) # run a db analyze so that SQLite creates any internal tables and indices, ======================================================================== --- tests/t_netsync_largish_file.at 8755303c05b12e8425240a7e2afdab6aac8c1c09 +++ tests/t_netsync_largish_file.at 378eb7ed343ad9a0e8caef98f96779d7ce2dc682 @@ -1,22 +1,12 @@ AT_SETUP([netsync largish file]) AT_KEYWORDS([netsync]) # Check that we can netsync a 32MB file. -# We use awk(1) to generate an incompressible file, since the file will be -# compressed in the monotone database and on the wire. 
- -m4_define([LARGISH_FILE_CREATE], [ -awk -- 'BEGIN{srand(5253);for(a=0;a<1024*1024;a+=20)printf("%c%c%c%c%c%c%c%c%c%c%c%c%c%c%c%c%c%c%c%c",rand()*256,rand()*256,rand()*256,rand()*256,rand()*256,rand()*256,rand()*256,rand()*256,rand()*256,rand()*256,rand()*256,rand()*256,rand()*256,rand()*256,rand()*256,rand()*256,rand()*256,rand()*256,rand()*256,rand()*256);}' > largish.tmp -cat largish.tmp largish.tmp largish.tmp largish.tmp largish.tmp largish.tmp largish.tmp largish.tmp largish.tmp largish.tmp largish.tmp largish.tmp largish.tmp largish.tmp largish.tmp largish.tmp largish.tmp largish.tmp largish.tmp largish.tmp largish.tmp largish.tmp largish.tmp largish.tmp largish.tmp largish.tmp largish.tmp largish.tmp largish.tmp largish.tmp largish.tmp largish.tmp > largish -rm -f largish.tmp -]) - - MONOTONE_SETUP NETSYNC_SETUP -AT_CHECK(LARGISH_FILE_CREATE) +LARGISH_FILE_CREATE(largish, 32) AT_CHECK(MONOTONE add largish, [], [ignore], [ignore]) COMMIT(testbranch) ======================================================================== --- testsuite.at a5d2d19404e6806e02418c1d4fad1a511f64ec6b +++ testsuite.at 7f5b300d0781d438772a16ec40edb7364ea5f089 @@ -453,6 +453,17 @@ # run as TAIL(lines) m4_define([TAIL], [(tail -n $1 2>/dev/null || tail -$1)]) +# run as LARGISH_FILE_CREATE(filename, size) to create a MB file of +# uncompressible data +m4_define([LARGISH_FILE_CREATE], [ +awk -- 'BEGIN{srand(5253);for(a=0;a<1024*1024;a+=20)printf("%c%c%c%c%c%c%c%c%c%c%c%c%c%c%c%c%c%c%c%c",rand()*256,rand()*256,rand()*256,rand()*256,rand()*256,rand()*256,rand()*256,rand()*256,rand()*256,rand()*256,rand()*256,rand()*256,rand()*256,rand()*256,rand()*256,rand()*256,rand()*256,rand()*256,rand()*256,rand()*256);}' > largish.tmp +i=$2 +while test $i -gt 0; do + cat largish.tmp >> $1 + i=$((i - 1)) +done +rm -f largish.tmp +]) # include all the sub-tests we're going to use
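For illustration: a call such as LARGISH_FILE_CREATE(largish, 32), as used by
t_netsync_largish_file.at above, should expand to shell roughly like the
following sketch once m4 substitutes $1 and $2 (quoting and whitespace details
of the real m4 expansion are omitted here):

  # emit ~1MB of pseudo-random, hence poorly compressible, bytes (20 per printf)
  awk -- 'BEGIN{srand(5253);for(a=0;a<1024*1024;a+=20)printf("%c%c%c%c%c%c%c%c%c%c%c%c%c%c%c%c%c%c%c%c",rand()*256,rand()*256,rand()*256,rand()*256,rand()*256,rand()*256,rand()*256,rand()*256,rand()*256,rand()*256,rand()*256,rand()*256,rand()*256,rand()*256,rand()*256,rand()*256,rand()*256,rand()*256,rand()*256,rand()*256);}' > largish.tmp
  # append the ~1MB chunk 32 times to build the 32MB test file, then clean up
  i=32
  while test $i -gt 0; do
    cat largish.tmp >> largish
    i=$((i - 1))
  done
  rm -f largish.tmp

Note that the loop appends with >>, so it assumes the target file does not
already exist, which holds in a fresh autotest directory.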